From 615fe68889bbdd33ed63eac0a291fbd4dbb44d2f Mon Sep 17 00:00:00 2001
From: wuyuhan
Date: Tue, 9 Apr 2024 16:07:46 +0800
Subject: [PATCH 01/21] Build a unified framework for att-advisor and
 ma-advisor
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

---
 .gitignore | 6 +
 profiler/advisor/README.md | 41 --
 profiler/advisor/__init__.py | 15 -
 profiler/advisor/advisor_backend/__init__.py | 14 -
 .../advisor/advisor_backend/advice_base.py | 50 --
 .../advice_factory/__init__.py | 14 -
 .../advice_factory/advice_factory.py | 50 --
 .../advice_factory/cluster_advice_factory.py | 38 --
 .../advice_factory/compute_advice_factory.py | 34 -
 .../advice_factory/overall_advice_factory.py | 32 -
 .../advice_factory/timeline_advice_factory.py | 34 -
 .../cluster_advice/__init__.py | 14 -
 .../cluster_advice/cluster_advice_base.py | 67 --
 .../cluster_advice/cluster_pipeline_advice.py | 437 ------------
 .../cluster_advice/kernel_cluster_advice.py | 62 --
 .../cluster_advice/slow_link_advice.py | 110 ---
 .../cluster_advice/slow_rank_advice.py | 71 --
 .../common_func_advisor/__init__.py | 14 -
 .../common_func_advisor/constant.py | 225 -------
 .../common_func_advisor/trace_view_json.py | 209 ------
 .../trace_view_preprocessor.py | 208 ------
 .../compute_advice/__init__.py | 14 -
 .../compute_advice/compute_advice_base.py | 105 ---
 .../compute_advice/npu_fused/__init__.py | 14 -
 .../compute_advice/npu_fused/csv_analyzer.py | 81 ---
 .../compute_advice/npu_fused/json_analyzer.py | 55 --
 .../compute_advice/npu_fused/op_perf.py | 196 ------
 .../compute_advice/npu_fused_advice.py | 71 --
 .../compute_advice/npu_slow_advice.py | 82 ---
 profiler/advisor/advisor_backend/interface.py | 62 --
 .../overall_advice/overall_summary_advice.py | 174 -----
 .../prof_bean_advisor/__init__.py | 14 -
 .../cluster_step_trace_time_bean.py | 67 --
 .../timeline_advice/__init__.py | 14 -
 .../timeline_advice/op_schedule_advice.py | 89 ---
 .../timeline_advice/optimizer_advice.py | 55 --
 .../timeline_advice/timeline_advice_base.py | 99 ---
 .../overall_advice => analyzer}/__init__.py | 0
 profiler/advisor/analyzer/base_analyzer.py | 16 +
 .../analyzer/communication/__init__.py | 0
 .../communication/bandwidth/__init__.py | 0
 .../communication/environment/__init__.py | 0
 .../advisor/analyzer/computing/__init__.py | 0
 .../analyzer/computing/aicpu/__init__.py | 0
 .../analyzer/computing/bound/__init__.py | 0
 .../analyzer/computing/op_compile/__init__.py | 0
 .../advisor/analyzer/dataloader/__init__.py | 0
 profiler/advisor/analyzer/overall/__init__.py | 0
 .../advisor/analyzer/scheduling/__init__.py | 0
 .../scheduling/free_event/__init__.py | 0
 .../scheduling/fusion_ops/__init__.py | 0
 profiler/advisor/cluster_perf_analysis.ipynb | 625 ------------------
 profiler/advisor/common/__init__.py | 0
 profiler/advisor/common/constant.py | 106 +++
 profiler/advisor/common/module_lib.py | 87 +++
 profiler/advisor/common/timeline/__init__.py | 0
 profiler/advisor/common/timeline/event.py | 23 +
 .../advisor/common/timeline/fusion_ops_db.py | 555 ++++++++++++++++
 profiler/advisor/common/version_control.py | 26 +
 profiler/advisor/compute_perf_analysis.ipynb | 366 ----------
 profiler/advisor/config/__init__.py | 0
 profiler/advisor/config/config.ini | 16 +
 profiler/advisor/config/config.py | 103 +++
 profiler/advisor/dataset/__init__.py | 6 +
 profiler/advisor/display/__init__.py | 0
 profiler/advisor/display/html/__init__.py | 0
 profiler/advisor/display/html/render.py | 44 ++
 .../display/html/templates/affinity_api.html | 50 ++
 .../advisor/display/html/templates/main.html | 202 ++++++
 .../html/templates/overall_analysis.html | 15 +
 profiler/advisor/img/advisor_result.PNG | Bin 53557 -> 0 bytes
 profiler/advisor/img/jupyter_report.PNG | Bin 34097 -> 0 bytes
 profiler/advisor/interface/__init__.py | 0
 profiler/advisor/interface/interface.py | 67 ++
 profiler/advisor/overall_perf_analysis.ipynb | 323 ---------
 profiler/advisor/result/__init__.py | 0
 profiler/advisor/result/item.py | 61 ++
 profiler/advisor/result/result.py | 201 ++++++
 profiler/advisor/rules/__init__.py | 0
 .../advisor/rules/timeline_fusion_ops.yaml | 59 ++
 profiler/advisor/timeline_perf_analysis.ipynb | 163 -----
 profiler/advisor/utils/__init__.py | 0
 profiler/advisor/utils/log.py | 63 ++
 profiler/advisor/utils/tools.py | 76 +++
 profiler/advisor/utils/utils.py | 499 ++++++++++++++
 profiler/advisor/version.py | 38 ++
 86 files changed, 2319 insertions(+), 4408 deletions(-)
 delete mode 100644 profiler/advisor/README.md
 delete mode 100644 profiler/advisor/advisor_backend/__init__.py
 delete mode 100644 profiler/advisor/advisor_backend/advice_base.py
 delete mode 100644 profiler/advisor/advisor_backend/advice_factory/__init__.py
 delete mode 100644 profiler/advisor/advisor_backend/advice_factory/advice_factory.py
 delete mode 100644 profiler/advisor/advisor_backend/advice_factory/cluster_advice_factory.py
 delete mode 100644 profiler/advisor/advisor_backend/advice_factory/compute_advice_factory.py
 delete mode 100644 profiler/advisor/advisor_backend/advice_factory/overall_advice_factory.py
 delete mode 100644 profiler/advisor/advisor_backend/advice_factory/timeline_advice_factory.py
 delete mode 100644 profiler/advisor/advisor_backend/cluster_advice/__init__.py
 delete mode 100644 profiler/advisor/advisor_backend/cluster_advice/cluster_advice_base.py
 delete mode 100644 profiler/advisor/advisor_backend/cluster_advice/cluster_pipeline_advice.py
 delete mode 100644 profiler/advisor/advisor_backend/cluster_advice/kernel_cluster_advice.py
 delete mode 100644 profiler/advisor/advisor_backend/cluster_advice/slow_link_advice.py
 delete mode 100644 profiler/advisor/advisor_backend/cluster_advice/slow_rank_advice.py
 delete mode 100644 profiler/advisor/advisor_backend/common_func_advisor/__init__.py
 delete mode 100644 profiler/advisor/advisor_backend/common_func_advisor/constant.py
 delete mode 100644 profiler/advisor/advisor_backend/common_func_advisor/trace_view_json.py
 delete mode 100644 profiler/advisor/advisor_backend/common_func_advisor/trace_view_preprocessor.py
 delete mode 100644 profiler/advisor/advisor_backend/compute_advice/__init__.py
 delete mode 100644 profiler/advisor/advisor_backend/compute_advice/compute_advice_base.py
 delete mode 100644 profiler/advisor/advisor_backend/compute_advice/npu_fused/__init__.py
 delete mode 100644 profiler/advisor/advisor_backend/compute_advice/npu_fused/csv_analyzer.py
 delete mode 100644 profiler/advisor/advisor_backend/compute_advice/npu_fused/json_analyzer.py
 delete mode 100644 profiler/advisor/advisor_backend/compute_advice/npu_fused/op_perf.py
 delete mode 100644 profiler/advisor/advisor_backend/compute_advice/npu_fused_advice.py
 delete mode 100644 profiler/advisor/advisor_backend/compute_advice/npu_slow_advice.py
 delete mode 100644 profiler/advisor/advisor_backend/interface.py
 delete mode 100644 profiler/advisor/advisor_backend/overall_advice/overall_summary_advice.py
 delete mode 100644 profiler/advisor/advisor_backend/prof_bean_advisor/__init__.py
 delete mode 100644 profiler/advisor/advisor_backend/prof_bean_advisor/cluster_step_trace_time_bean.py
 delete mode 100644 profiler/advisor/advisor_backend/timeline_advice/__init__.py
 delete mode 100644 profiler/advisor/advisor_backend/timeline_advice/op_schedule_advice.py
 delete mode 100644 profiler/advisor/advisor_backend/timeline_advice/optimizer_advice.py
 delete mode 100644 profiler/advisor/advisor_backend/timeline_advice/timeline_advice_base.py
 rename profiler/advisor/{advisor_backend/overall_advice => analyzer}/__init__.py (100%)
 create mode 100644 profiler/advisor/analyzer/base_analyzer.py
 create mode 100644 profiler/advisor/analyzer/communication/__init__.py
 create mode 100644 profiler/advisor/analyzer/communication/bandwidth/__init__.py
 create mode 100644 profiler/advisor/analyzer/communication/environment/__init__.py
 create mode 100644 profiler/advisor/analyzer/computing/__init__.py
 create mode 100644 profiler/advisor/analyzer/computing/aicpu/__init__.py
 create mode 100644 profiler/advisor/analyzer/computing/bound/__init__.py
 create mode 100644 profiler/advisor/analyzer/computing/op_compile/__init__.py
 create mode 100644 profiler/advisor/analyzer/dataloader/__init__.py
 create mode 100644 profiler/advisor/analyzer/overall/__init__.py
 create mode 100644 profiler/advisor/analyzer/scheduling/__init__.py
 create mode 100644 profiler/advisor/analyzer/scheduling/free_event/__init__.py
 create mode 100644 profiler/advisor/analyzer/scheduling/fusion_ops/__init__.py
 delete mode 100644 profiler/advisor/cluster_perf_analysis.ipynb
 create mode 100644 profiler/advisor/common/__init__.py
 create mode 100644 profiler/advisor/common/constant.py
 create mode 100644 profiler/advisor/common/module_lib.py
 create mode 100644 profiler/advisor/common/timeline/__init__.py
 create mode 100644 profiler/advisor/common/timeline/event.py
 create mode 100644 profiler/advisor/common/timeline/fusion_ops_db.py
 create mode 100644 profiler/advisor/common/version_control.py
 delete mode 100644 profiler/advisor/compute_perf_analysis.ipynb
 create mode 100644 profiler/advisor/config/__init__.py
 create mode 100644 profiler/advisor/config/config.ini
 create mode 100644 profiler/advisor/config/config.py
 create mode 100644 profiler/advisor/dataset/__init__.py
 create mode 100644 profiler/advisor/display/__init__.py
 create mode 100644 profiler/advisor/display/html/__init__.py
 create mode 100644 profiler/advisor/display/html/render.py
 create mode 100644 profiler/advisor/display/html/templates/affinity_api.html
 create mode 100644 profiler/advisor/display/html/templates/main.html
 create mode 100644 profiler/advisor/display/html/templates/overall_analysis.html
 delete mode 100644 profiler/advisor/img/advisor_result.PNG
 delete mode 100644 profiler/advisor/img/jupyter_report.PNG
 create mode 100644 profiler/advisor/interface/__init__.py
 create mode 100644 profiler/advisor/interface/interface.py
 delete mode 100644 profiler/advisor/overall_perf_analysis.ipynb
 create mode 100644 profiler/advisor/result/__init__.py
 create mode 100644 profiler/advisor/result/item.py
 create mode 100644 profiler/advisor/result/result.py
 create mode 100644 profiler/advisor/rules/__init__.py
 create mode 100644 profiler/advisor/rules/timeline_fusion_ops.yaml
 delete mode 100644 profiler/advisor/timeline_perf_analysis.ipynb
 create mode 100644 profiler/advisor/utils/__init__.py
 create mode 100644 profiler/advisor/utils/log.py
 create mode 100644 profiler/advisor/utils/tools.py
 create mode 100644 profiler/advisor/utils/utils.py
 create mode 100644 profiler/advisor/version.py
diff --git
a/.gitignore b/.gitignore index a81c8ee121..36aacc7241 100644 --- a/.gitignore +++ b/.gitignore @@ -136,3 +136,9 @@ dmypy.json # Cython debug symbols cython_debug/ + +# advisor analysis output +att_advisor*.html +*.xlsx +operator_tuning_file*.cfg +.ipynb_checkpoints/ \ No newline at end of file diff --git a/profiler/advisor/README.md b/profiler/advisor/README.md deleted file mode 100644 index 722243cdc2..0000000000 --- a/profiler/advisor/README.md +++ /dev/null @@ -1,41 +0,0 @@ -# 性能分析工具 - -性能分析工具是将Ascend PyTorch Profiler采集的性能数据进行分析,并输出性能调优建议的工具 。使用方式如下: - -下列以Windows环境下执行为例介绍。 - -1. 在环境下安装jupyter notebook工具。 - - ```bash - pip install jupyter notebook - ``` - - jupyter notebook工具的具体安装和使用指导请至jupyter notebook工具官网查找。 - -2. 在环境下安装ATT工具。 - - ``` - git clone https://gitee.com/ascend/att.git - ``` - - 安装环境下保存Ascend PyTorch Profiler采集的性能数据。 - -3. 进入att\profiler\advisor目录执行如下命令启动jupyter notebook工具。 - - ```bash - jupyter notebook - ``` - - 执行成功则自动启动浏览器读取att\profiler\advisor目录,如下示例: - - ![jupyter_report](img/jupyter_report.PNG) - - 若在Linux环境下则回显打印URL地址,即是打开jupyter notebook工具页面的地址,需要复制URL,并使用浏览器访问(若为远端服务器则需要将域名“**localhost**”替换为远端服务器的IP),进入jupyter notebook工具页面。 - -4. 每个.ipynb文件为一项性能数据分析任务,选择需要的.ipynb打开,并在*_path参数下拷贝保存Ascend PyTorch Profiler采集的性能数据的路径。如下示例: - - ![advisor_result](img/advisor_result.PNG) - -5. 单击运行按钮执行性能数据分析。 - - 分析结果详细内容会在.ipynb页面下展示。 diff --git a/profiler/advisor/__init__.py b/profiler/advisor/__init__.py index 0428ee03f0..e69de29bb2 100644 --- a/profiler/advisor/__init__.py +++ b/profiler/advisor/__init__.py @@ -1,15 +0,0 @@ -# Copyright (c) 2023, Huawei Technologies Co., Ltd. -# All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - diff --git a/profiler/advisor/advisor_backend/__init__.py b/profiler/advisor/advisor_backend/__init__.py deleted file mode 100644 index a0e9f748f4..0000000000 --- a/profiler/advisor/advisor_backend/__init__.py +++ /dev/null @@ -1,14 +0,0 @@ -# Copyright (c) 2023, Huawei Technologies Co., Ltd. -# All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. \ No newline at end of file diff --git a/profiler/advisor/advisor_backend/advice_base.py b/profiler/advisor/advisor_backend/advice_base.py deleted file mode 100644 index 35939bcea9..0000000000 --- a/profiler/advisor/advisor_backend/advice_base.py +++ /dev/null @@ -1,50 +0,0 @@ -# Copyright (c) 2023, Huawei Technologies Co., Ltd. -# All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -import os -from abc import abstractmethod - - -class AdviceBase: - DATA = "data" - BOTTLENECK = "bottleneck" - ADVICE = "advice" - - def __init__(self, collection_path: str): - self.collection_path = os.path.realpath(collection_path) - self.bottelneck = '' - self.output_format_data = { - self.DATA: [], - self.BOTTLENECK: '', - self.ADVICE: '' - } - - @abstractmethod - def path_check(self): - """ - check whether input path is valid - """ - - @abstractmethod - def run(self): - """ - analyze profiling data and advice - """ - - @abstractmethod - def output(self): - """ - output relevant data - """ \ No newline at end of file diff --git a/profiler/advisor/advisor_backend/advice_factory/__init__.py b/profiler/advisor/advisor_backend/advice_factory/__init__.py deleted file mode 100644 index a0e9f748f4..0000000000 --- a/profiler/advisor/advisor_backend/advice_factory/__init__.py +++ /dev/null @@ -1,14 +0,0 @@ -# Copyright (c) 2023, Huawei Technologies Co., Ltd. -# All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. \ No newline at end of file diff --git a/profiler/advisor/advisor_backend/advice_factory/advice_factory.py b/profiler/advisor/advisor_backend/advice_factory/advice_factory.py deleted file mode 100644 index 639f4800cf..0000000000 --- a/profiler/advisor/advisor_backend/advice_factory/advice_factory.py +++ /dev/null @@ -1,50 +0,0 @@ -# Copyright (c) 2023, Huawei Technologies Co., Ltd. -# All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-import os - -from common_func.path_manager import PathManager - - -class AdviceFactory: - def __init__(self, collection_path: str): - self.collection_path = os.path.realpath(collection_path) - - @staticmethod - def run_advice(self, advice: str, kwargs: dict): - """ - run advice to produce data - """ - - def produce_advice(self, advice: str, kwargs: dict): - """ - produce data for input mode and advice - """ - self.path_check() - self.advice_check(advice) - return self.run_advice(advice, kwargs) - - def path_check(self): - """ - check whether input path is valid - """ - PathManager.input_path_common_check(self.collection_path) - - def advice_check(self, advice: str): - """ - check whether input advice is valid - """ - if advice not in self.ADVICE_LIB.keys(): - msg = '[ERROR]Input advice is illegal.' - raise RuntimeError(msg) diff --git a/profiler/advisor/advisor_backend/advice_factory/cluster_advice_factory.py b/profiler/advisor/advisor_backend/advice_factory/cluster_advice_factory.py deleted file mode 100644 index 6bb93f4670..0000000000 --- a/profiler/advisor/advisor_backend/advice_factory/cluster_advice_factory.py +++ /dev/null @@ -1,38 +0,0 @@ -# Copyright (c) 2023, Huawei Technologies Co., Ltd. -# All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -from advice_factory.advice_factory import AdviceFactory -from cluster_advice.slow_link_advice import SlowLinkAdvice -from cluster_advice.slow_rank_advice import SlowRankAdvice -from cluster_advice.cluster_pipeline_advice import ClusterPipelineAdvice -from cluster_advice.kernel_cluster_advice import KernelClusterAdvice -from common_func_advisor.constant import Constant - - -class ClusterAdviceFactory(AdviceFactory): - ADVICE_LIB = { - Constant.SLOW_RANK: SlowRankAdvice, - Constant.SLOW_LINK: SlowLinkAdvice, - Constant.PIPELINE: ClusterPipelineAdvice, - Constant.KERNEL: KernelClusterAdvice - } - - def __init__(self, collection_path: str): - super().__init__(collection_path) - - def run_advice(self, advice: str, kwargs: dict): - """ - run advice to produce data - """ - return self.ADVICE_LIB.get(advice)(self.collection_path, kwargs).run() diff --git a/profiler/advisor/advisor_backend/advice_factory/compute_advice_factory.py b/profiler/advisor/advisor_backend/advice_factory/compute_advice_factory.py deleted file mode 100644 index 336bef7dd8..0000000000 --- a/profiler/advisor/advisor_backend/advice_factory/compute_advice_factory.py +++ /dev/null @@ -1,34 +0,0 @@ -# Copyright (c) 2023, Huawei Technologies Co., Ltd. -# All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
-# See the License for the specific language governing permissions and -# limitations under the License. -from common_func_advisor.constant import Constant -from advice_factory.advice_factory import AdviceFactory -from compute_advice.npu_fused_advice import NpuFusedAdvice -from compute_advice.npu_slow_advice import NpuSlowAdvice - - -class ComputeAdviceFactory(AdviceFactory): - ADVICE_LIB = { - Constant.NPU_FUSED: NpuFusedAdvice, - Constant.NPU_SLOW: NpuSlowAdvice, - } - - def __init__(self, collection_path: str): - super().__init__(collection_path) - - def run_advice(self, advice: str, kwargs: dict): - """ - run advice to produce data - """ - return self.ADVICE_LIB.get(advice)(self.collection_path).run() diff --git a/profiler/advisor/advisor_backend/advice_factory/overall_advice_factory.py b/profiler/advisor/advisor_backend/advice_factory/overall_advice_factory.py deleted file mode 100644 index baf80cc200..0000000000 --- a/profiler/advisor/advisor_backend/advice_factory/overall_advice_factory.py +++ /dev/null @@ -1,32 +0,0 @@ -# Copyright (c) 2024, Huawei Technologies Co., Ltd. -# All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -from advice_factory.advice_factory import AdviceFactory -from common_func_advisor.constant import Constant -from overall_advice.overall_summary_advice import OverallSummaryAdvice - - -class OverallAdviceFactory(AdviceFactory): - ADVICE_LIB = { - Constant.SUMMARY: OverallSummaryAdvice - } - - def __init__(self, collection_path: str): - super().__init__(collection_path) - - def run_advice(self, advice: str, kwargs: dict): - """ - run advice to produce data - """ - return self.ADVICE_LIB.get(advice)(self.collection_path, kwargs).run() diff --git a/profiler/advisor/advisor_backend/advice_factory/timeline_advice_factory.py b/profiler/advisor/advisor_backend/advice_factory/timeline_advice_factory.py deleted file mode 100644 index 44b352e95a..0000000000 --- a/profiler/advisor/advisor_backend/advice_factory/timeline_advice_factory.py +++ /dev/null @@ -1,34 +0,0 @@ -# Copyright (c) 2023, Huawei Technologies Co., Ltd. -# All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-from advice_factory.advice_factory import AdviceFactory -from common_func_advisor.constant import Constant -from timeline_advice.optimizer_advice import OptimizerAdvice -from timeline_advice.op_schedule_advice import OpScheduleAdvice - - -class TimelineAdviceFactory(AdviceFactory): - ADVICE_LIB = { - Constant.OPTIM: OptimizerAdvice, - Constant.OP_SCHE: OpScheduleAdvice, - } - - def __init__(self, collection_path: str): - super().__init__(collection_path) - - def run_advice(self, advice: str, kwargs: dict): - """ - run advice to produce data - """ - return self.ADVICE_LIB.get(advice)(self.collection_path).run() diff --git a/profiler/advisor/advisor_backend/cluster_advice/__init__.py b/profiler/advisor/advisor_backend/cluster_advice/__init__.py deleted file mode 100644 index 8400fd5ecd..0000000000 --- a/profiler/advisor/advisor_backend/cluster_advice/__init__.py +++ /dev/null @@ -1,14 +0,0 @@ -# Copyright (c) 2023, Huawei Technologies Co., Ltd. -# All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. diff --git a/profiler/advisor/advisor_backend/cluster_advice/cluster_advice_base.py b/profiler/advisor/advisor_backend/cluster_advice/cluster_advice_base.py deleted file mode 100644 index e9be467596..0000000000 --- a/profiler/advisor/advisor_backend/cluster_advice/cluster_advice_base.py +++ /dev/null @@ -1,67 +0,0 @@ -# Copyright (c) 2023, Huawei Technologies Co., Ltd. -# All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- -import os -from abc import abstractmethod -from common_func.constant import Constant -from advice_base import AdviceBase -from cluster_analysis import Interface - - -class ClusterAdviceBase(AdviceBase): - def __init__(self, collection_path: str): - super().__init__(collection_path) - - @staticmethod - def compute_max_gap_ratio(data: list, mean: float): - if mean == 0: - return 0 - else: - return (max(data) - min(data)) / mean - - def path_check(self): - """ - check whether input path is valid - """ - for file in os.listdir(self.collection_path): - if file == 'cluster_analysis_output': - print("[INFO]Cluster has been analyzed " - "because of the existence of cluster analysis output directory.") - print("[INFO]Skip Cluster analyze backend.") - return - print("[INFO] cluster analysis is in the process, please wait...") - self.cluster_analyze() - - def cluster_analyze(self): - parameter = { - Constant.COLLECTION_PATH: self.collection_path, - Constant.ANALYSIS_MODE: "all" - } - try: - Interface(parameter).run() - except Exception as e: - raise ValueError(f"Cluster analyze backend failed:{e}") from e - - @abstractmethod - def run(self): - """ - analyze profiling data and advice - """ - - @abstractmethod - def output(self): - """ - output relevant data - """ \ No newline at end of file diff --git a/profiler/advisor/advisor_backend/cluster_advice/cluster_pipeline_advice.py b/profiler/advisor/advisor_backend/cluster_advice/cluster_pipeline_advice.py deleted file mode 100644 index 7f8846f1d9..0000000000 --- a/profiler/advisor/advisor_backend/cluster_advice/cluster_pipeline_advice.py +++ /dev/null @@ -1,437 +0,0 @@ -# Copyright (c) 2023, Huawei Technologies Co., Ltd. -# All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- -import os -import time -import multiprocessing -from typing import Dict -from typing import Optional -from typing import Deque -from typing import List -from typing import Tuple -from collections import defaultdict -from collections import deque -from decimal import Decimal -from dataclasses import dataclass - -from common_func.file_manager import FileManager -from common_func_advisor.constant import Constant -from common_func_advisor.trace_view_preprocessor import FineTraceViewData -from common_func_advisor.trace_view_preprocessor import TraceViewPreProcessor -from cluster_advice.cluster_advice_base import ClusterAdviceBase -from cluster_data_preprocess.pytorch_data_preprocessor import PytorchDataPreprocessor - - -@dataclass -class PipelineTimeSlice: - start: str = "" - end: str = "" - slice_type: str = "" - bp_timeslice: list = None - - def __post_init__(self): - self.bp_timeslice = self.bp_timeslice or [] - - -class PipelineTraceViewer: - STAGE_COLOR = "good" - BUBBLE_COLOR = "generic_work" - FP_COLOR = "good" - BP_COLOR = "bad" - PIPLINE_VIEW = "Pipeline View" - STAGE = "Stage" - BUBBLE = "Bubble" - FP = "FP" - BP = "BP" - - COLORS = { - STAGE: STAGE_COLOR, - BUBBLE: BUBBLE_COLOR, - FP: FP_COLOR, - BP: BP_COLOR - } - - def _gen_trace_pair(self, name: str, start_ts: str, end_ts: str, pid: str, tid: str) -> Dict: - data = { - Constant.OP_NAME: name, - Constant.CNAME: self.COLORS.get(name, self.BUBBLE), - Constant.PH: Constant.PH_X, - Constant.PID: pid, - Constant.OP_TID: tid, - Constant.TS: start_ts, - Constant.DUR: str(Decimal(end_ts) - Decimal(start_ts)) - } - - return data - - def gen_stage_bubble_trace_data(self, rank_id: int, timeslice_list: List[PipelineTimeSlice]) -> List[Dict]: - """ - generate stage bubble trace json data - """ - rank_str = f'Rank {rank_id}' - trace_data = [] - - for timeslice in timeslice_list: - data = self._gen_trace_pair(timeslice.slice_type, timeslice.start, - timeslice.end, self.PIPLINE_VIEW, rank_str) - trace_data.append(data) - - return trace_data - - def gen_fp_bp_trace_data(self, rank_id: int, timeslice_list: List[PipelineTimeSlice]) -> List[Dict]: - """ - generate fp bp trace json data - """ - rank_str = f'Rank {rank_id}' - trace_data = [] - - for timeslice in timeslice_list: - if timeslice.slice_type == self.BUBBLE: - data = self._gen_trace_pair(timeslice.slice_type, timeslice.start, - timeslice.end, self.PIPLINE_VIEW, rank_str) - trace_data.append(data) - else: - last_end = timeslice.start - for bp_bound in timeslice.bp_timeslice: - data = self._gen_trace_pair(self.FP, last_end, - bp_bound[0], self.PIPLINE_VIEW, rank_str) - trace_data.append(data) - last_end = bp_bound[1] - - data = self._gen_trace_pair(self.BP, bp_bound[0], - bp_bound[1], self.PIPLINE_VIEW, rank_str) - trace_data.append(data) - - last_data = self._gen_trace_pair(self.FP, last_end, - timeslice.end, self.PIPLINE_VIEW, rank_str) - trace_data.append(last_data) - - return trace_data - - -class ClusterPipelineAdvice(ClusterAdviceBase): - BUBBLE = "Bubble" - STAGE = "Stage" - PIPELINE_VIEW = "Pipeline View" - SAVE_JSON = "pipeline_view.json" - - def __init__(self, collection_path: str, kwargs: dict): - super().__init__(collection_path) - self.rank_ids = list(set(kwargs.get("rank_ids", []))) - self.worker_num = kwargs.get("worker_num", int(multiprocessing.cpu_count() / 2)) - self.rank_prof_dirs = {} - self.cur_data = [] - self.cur_bottleneck = {} - self.cur_advices = "" - - def run(self) -> dict: - """ - Unified entrance interface - """ - self.rank_prof_dirs = 
self.get_rank_prof_dirs(self.rank_ids) - if not self.rank_prof_dirs: - print("[ERROR] No rank profiling data found, please check the rank ids or dir path.") - return {} - - self.process() - self.output() - self.identify_bottleneck() - return self.output_format_data - - def process(self) -> None: - """ - process all rank profiling data by using multi-process - """ - start_time = time.time() - print(f"[INFO] Start to process {len(self.rank_prof_dirs)} rank profiling data with {self.worker_num} workers.") - with multiprocessing.Pool(self.worker_num) as pool: - results = pool.map(self.work, self.rank_prof_dirs.items()) - - for (rank_id, _), (res, show_fp_bp) in zip(self.rank_prof_dirs.items(), results): - if show_fp_bp: - self.cur_data += PipelineTraceViewer().gen_fp_bp_trace_data(rank_id, res) - else: - self.cur_data += PipelineTraceViewer().gen_stage_bubble_trace_data(rank_id, res) - print(f"[INFO] Pipline view data process finished, cost {time.time() - start_time:.2f}s.") - - @staticmethod - def _align_trace_bound(results: List) -> None: - """ - align all rank trace bound for better visualization - """ - start_list, end_list = [], [] - for res in results: - start_list.append(res[0].start) - end_list.append(res[-1].end) - - # update all rank trace bound - for res in results: - res[0].start = min(start_list) - res[-1].end = max(end_list) - - def work(self, kv: Tuple[int, str]) -> Tuple[List[PipelineTimeSlice], bool]: - """ - single process worker function - """ - show_fp_bp = False - rank_id, rank_prof_dir = kv - print(f"[INFO] [Rank {rank_id}] Start to process rank profiling data.") - json_path = os.path.join(rank_prof_dir, Constant.ASCEND_PROFILER_OUTPUT, Constant.TRACE_VIEW_JSON) - fine_data = self.load_trace_view_data(json_path) - if not fine_data.hcom_ops or not fine_data.hcom_tids: - print(f"[ERROR] [Rank {rank_id}] No hcom send recv ops found, make sure the trace view data is pipeline " - f"parallel sense.") - return [], show_fp_bp - - timeslice_list = self.get_pipeline_timeslice(fine_data.hcom_ops, fine_data.hcom_tids, fine_data.min_ts, - fine_data.max_ts) - if not fine_data.fp_ops or not fine_data.bp_ops: - print(f"[INFO] [Rank {rank_id}] No frameWork data in trace view, only show stage and bubble.") - elif len(fine_data.hcom_tids) > 1: - print(f"[WARN] [Rank {rank_id}] More than one hcom tid found, only show stage and bubble.") - else: - print(f"[INFO] [Rank {rank_id}] Found frameWork data in trace view, show fp bp and bubble.") - bp_ops = self.get_fp_bp_bound_ops(fine_data) - self.update_stage_fp_bp(timeslice_list, bp_ops) - show_fp_bp = True - print(f"[INFO] [Rank {rank_id}] Rank profiling data process finished.") - - return timeslice_list, show_fp_bp - - def identify_bottleneck(self) -> None: - pass - - def output(self) -> None: - """ - output result - """ - self.cur_data.append( - { - Constant.OP_NAME: Constant.PROCESS_NAME, - Constant.PH: Constant.PH_META, - Constant.PID: self.PIPELINE_VIEW, - Constant.OP_TID: self.PIPELINE_VIEW, - Constant.ARGS: { - Constant.OP_NAME: self.PIPELINE_VIEW - } - } - ) - self.output_format_data[self.DATA] = self.cur_data - self.output_format_data[self.BOTTLENECK] = self.cur_bottleneck - self.output_format_data[self.ADVICE] = self.cur_advices - - def get_rank_prof_dirs(self, rank_ids: list) -> Dict[int, str]: - """ - get rank profiling directories by rank ids - """ - rank_prof_dirs = defaultdict(str) - prof_dirs = [] - for prof_dir in os.listdir(self.collection_path): - if prof_dir.endswith(Constant.PT_PROF_SUFFIX): - 
prof_dirs.append(os.path.join(self.collection_path, prof_dir)) - - data_map = PytorchDataPreprocessor(prof_dirs).get_data_map() - for rank_id in rank_ids: - if rank_id in data_map: - rank_prof_dirs[rank_id] = data_map[rank_id] - else: - print(f'[Warning] Rank {rank_id} not found in {self.collection_path}') - - return rank_prof_dirs - - @staticmethod - def load_trace_view_data(json_path) -> Optional[FineTraceViewData]: - """ - load trace view data from json file and preprocess - """ - raw_data = FileManager.read_json_file(json_path) - return TraceViewPreProcessor().process(raw_data) - - @staticmethod - def double_queue_pop(fp_que: Deque[dict], bp_que: Deque[dict]) -> Tuple[list, list]: - """ - double queue (fp and bp que) pop alternating algorithm implementation - """ - res_fp_ops, res_bp_ops = [], [] - pop_fp = fp_que[0][Constant.TS] < bp_que[0][Constant.TS] - fp_start_op, fp_end_op = fp_que[0], fp_que[0] - bp_start_op, bp_end_op = bp_que[0], bp_que[0] - - def update_bound_op(que: Deque[dict], start_op: dict, end_op: dict) -> Tuple[dict, dict]: - """ - update fp and bp bound op - """ - op = que.popleft() - op_s = Decimal(op[Constant.TS]) - op_e = op_s + Decimal(op[Constant.DUR]) - - start_op = op if op_s < Decimal(start_op[Constant.TS]) else start_op - end_op = op if op_e > Decimal(end_op[Constant.TS]) + Decimal(end_op[Constant.DUR]) else end_op - - return start_op, end_op - - while fp_que and bp_que: - if pop_fp: - if len(fp_que) > 1 and bp_que and fp_que[1][Constant.TS] > bp_que[0][Constant.TS]: - pop_fp = False # pop bp que - if len(fp_que) == 1: - pop_fp = False # pop bp que - - fp_start_op, fp_end_op = update_bound_op(fp_que, fp_start_op, fp_end_op) - - # time to pop bp que, need to record fp ops and update bp start op - if not pop_fp: - res_fp_ops.append((fp_start_op, fp_end_op)) - if fp_que: - bp_start_op, bp_end_op = bp_que[0], bp_que[0] - else: - if len(bp_que) > 1 and fp_que and bp_que[1][Constant.TS] > fp_que[0][Constant.TS]: - pop_fp = True # pop fp que - if len(bp_que) == 1: - pop_fp = True # pop fp que - - bp_start_op, bp_end_op = update_bound_op(bp_que, bp_start_op, bp_end_op) - - # time to pop fp que, need to record bp ops and update fp start op - if pop_fp: - res_bp_ops.append((bp_start_op, bp_end_op)) - if bp_que: - fp_start_op, fp_end_op = fp_que[0], fp_que[0] - - if fp_que: - fp_start_op, fp_end_op = fp_que[0], fp_que[0] - while fp_que: - fp_start_op, fp_end_op = update_bound_op(fp_que, fp_start_op, fp_end_op) - res_fp_ops.append((fp_start_op, fp_end_op)) - - if bp_que: - bp_start_op, bp_end_op = bp_que[0], bp_que[0] - while bp_que: - bp_start_op, bp_end_op = update_bound_op(bp_que, bp_start_op, bp_end_op) - res_bp_ops.append((bp_start_op, bp_end_op)) - - return res_fp_ops, res_bp_ops - - @staticmethod - def update_ops_time(ops_list: List[List[dict]], torch_to_npu_links: List[dict], - npu_ops_ts_dur: dict) -> List[List[dict]]: - """ - update fp and bp bound ops time at device by using torch_to_npu_links - """ - ops_que = deque(ops_list) - torch_to_npu_que = deque(torch_to_npu_links) - res = [] - link_stack = [] - while ops_que and torch_to_npu_que: - link = torch_to_npu_que.popleft() - link_s = Decimal(link[Constant.TS]) - - # bound op at framework level - cpu_op_l, cpu_op_r = ops_que[0][0], ops_que[0][1] - cpu_op_s = Decimal(cpu_op_l[Constant.TS]) - cpu_op_e = Decimal(cpu_op_r[Constant.TS]) + Decimal(cpu_op_r[Constant.DUR]) - - if cpu_op_s < link_s < cpu_op_e: - link_stack.append(link) - if link_s > cpu_op_e or \ - (link_stack and not torch_to_npu_que): - min_link = 
link_stack[0] - max_link = link_stack[-1] - - min_link_s = str(min_link[Constant.ID]) - max_link_s = str(max_link[Constant.ID]) - # for compatibility with old data (ts is float type) - if isinstance(min_link[Constant.ID], float): - cpu_op_l["npu_op_ts"] = min_link_s - cpu_op_r["npu_op_ts"] = max_link_s - else: - cpu_op_l["npu_op_ts"] = f"{min_link_s[:-3]}.{min_link_s[-3:]}" - cpu_op_r["npu_op_ts"] = f"{max_link_s[:-3]}.{max_link_s[-3:]}" - cpu_op_l["npu_op_dur"] = npu_ops_ts_dur.get(cpu_op_l["npu_op_ts"], 0) - cpu_op_r["npu_op_dur"] = npu_ops_ts_dur.get(cpu_op_r["npu_op_ts"], 0) - - res.append([cpu_op_l, cpu_op_r]) - ops_que.popleft() - link_stack.clear() - - return res - - def get_fp_bp_bound_ops(self, fine_data: FineTraceViewData) -> List[List[dict]]: - """ - get fp and bp bound ops by using double queue alternating pop algorithm and - update fp and bp bound ops time at device by using torch_to_npu_links - """ - fp_que = deque(fine_data.fp_ops) - bp_que = deque(fine_data.bp_ops) - - # get fp and bp bound ops - _, res_bp_ops = self.double_queue_pop(fp_que, bp_que) - - # according to torch_to_npu_links, split fp and bp timeslice - bp_ops = self.update_ops_time(res_bp_ops, fine_data.torch_to_npu_links, fine_data.npu_ops_ts_dur) - return bp_ops - - def get_pipeline_timeslice(self, hcom_ops: list, hcom_tids: list, - min_ts: str, max_ts: str) -> List[PipelineTimeSlice]: - """ - get pipeline timeslice by using hcom ops - """ - timeslice_list = [] - last_op_end = None - if len(hcom_tids) > 1: - print("[WARN] More than one hcom tid found, default to show minimal tid pipeline view.") - - for op in hcom_ops: - if op[Constant.OP_TID] == min(hcom_tids): - # gap between two hcom ops - if last_op_end: - timeslice_list.append(PipelineTimeSlice(str(last_op_end), op[Constant.TS], self.STAGE)) - # hcom op - last_op_end = Decimal(op[Constant.TS]) + Decimal(op[Constant.DUR]) - timeslice_list.append(PipelineTimeSlice(op[Constant.TS], str(last_op_end), self.BUBBLE)) - - # add start STAGE and end STAGE - timeslice_list.insert(0, PipelineTimeSlice(min_ts, timeslice_list[0].start, self.STAGE)) - timeslice_list.insert(len(timeslice_list), PipelineTimeSlice(timeslice_list[-1].end, max_ts, self.STAGE)) - return timeslice_list - - def update_stage_fp_bp(self, timeslice_list: List[PipelineTimeSlice], - bp_ops: List[List[dict]]) -> None: - """ - update stage fp and bp time - """ - pipeline_que = deque(timeslice_list) - bp_bound_que = deque(bp_ops) - - while pipeline_que and bp_bound_que: - while pipeline_que[0].slice_type != self.STAGE: - pipeline_que.popleft() - if not pipeline_que: - return None - - bp_bound_data = bp_bound_que[0] - bp_bound_s = Decimal(bp_bound_data[0]['npu_op_ts']) - bp_bound_e = Decimal(bp_bound_data[1]['npu_op_ts']) + Decimal(bp_bound_data[1]['npu_op_dur']) - - pipeline_s = Decimal(pipeline_que[0].start) - pipeline_e = Decimal(pipeline_que[0].end) - - if pipeline_s <= bp_bound_s and bp_bound_e <= pipeline_e: - pipeline_que[0].bp_timeslice.append((str(bp_bound_s), str(bp_bound_e))) - bp_bound_que.popleft() - elif bp_bound_s > pipeline_e: - pipeline_que.popleft() - else: - bp_bound_que.popleft() diff --git a/profiler/advisor/advisor_backend/cluster_advice/kernel_cluster_advice.py b/profiler/advisor/advisor_backend/cluster_advice/kernel_cluster_advice.py deleted file mode 100644 index 6fa83c765f..0000000000 --- a/profiler/advisor/advisor_backend/cluster_advice/kernel_cluster_advice.py +++ /dev/null @@ -1,62 +0,0 @@ -import os -import pandas as pd -from common_func.path_manager import PathManager 
-from common_func.constant import Constant -from common_func_advisor.constant import Constant as AdvisorConstant -from cluster_advice.cluster_advice_base import ClusterAdviceBase -from cluster_data_preprocess.pytorch_data_preprocessor import PytorchDataPreprocessor - - -class KernelClusterAdvice(ClusterAdviceBase): - COLUMNS_TO_GROUP = ["Name", "Input Shapes", "Input Data Types", "Output Shapes"] - COLUMNS_TO_CAL = ["Duration(us)"] - CAL_FUN = ['mean', 'var', 'max', 'min', 'count', 'sum'] - - def __init__(self, collection_path: str, kwargs: dict = None): - super().__init__(collection_path) - self.all_kernel_data = pd.DataFrame() - - def run(self): - self.load_kernel_details_data() - return self.calculate_data() - - def load_kernel_details_data(self): - prof_dirs = self.get_prof_dirs(self.collection_path) - if not prof_dirs: - msg = "[ERROR] There is no profile in this collection path, terminate analysis." - raise RuntimeError(msg) - - data_map = PytorchDataPreprocessor(prof_dirs).get_data_map() - self.all_kernel_data = pd.DataFrame() - for rank_id, profiling_dir_path in data_map.items(): - kernel_file = os.path.join(profiling_dir_path, Constant.SINGLE_OUTPUT, Constant.KERNEL_DETAILS_CSV) - if kernel_file: - # 判断csv文件大小 - PathManager.check_path_readable(kernel_file) - # 读取CSV文件 - df_temp = pd.read_csv(kernel_file) - columns_to_keep = self.COLUMNS_TO_GROUP + self.COLUMNS_TO_CAL - if [1 for element in columns_to_keep if element not in list(df_temp)]: - msg = "[ERROR] Kernel details.csv has wrong data columns, terminate analysis." - raise RuntimeError(msg) - df = df_temp[columns_to_keep] - df.insert(loc=0, column='rank id', value=rank_id) - # 将数据添加到最终的数据框中 - self.all_kernel_data = pd.concat([self.all_kernel_data, df], ignore_index=True) - - def calculate_data(self): - # 存储所有合并后的数据 - calculate_dict = {self.COLUMNS_TO_CAL[i]: self.CAL_FUN - for i in range(len(self.COLUMNS_TO_CAL))} - group_col = ["rank id"] + self.COLUMNS_TO_GROUP - view_data = self.all_kernel_data.groupby(group_col).agg(calculate_dict).reset_index() - view_data.columns = [''.join(col) if col[1] == "" else '_'.join(col) for col in view_data.columns] - return view_data - - def get_prof_dirs(self, collection_path): - prof_dirs = [] - for prof_dir in os.listdir(collection_path): - if prof_dir.endswith(AdvisorConstant.PT_PROF_SUFFIX): - prof_dirs.append(os.path.join(collection_path, prof_dir)) - - return prof_dirs \ No newline at end of file diff --git a/profiler/advisor/advisor_backend/cluster_advice/slow_link_advice.py b/profiler/advisor/advisor_backend/cluster_advice/slow_link_advice.py deleted file mode 100644 index f8a625242f..0000000000 --- a/profiler/advisor/advisor_backend/cluster_advice/slow_link_advice.py +++ /dev/null @@ -1,110 +0,0 @@ -# Copyright (c) 2023, Huawei Technologies Co., Ltd. -# All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- -import os -from collections import defaultdict -from common_func_advisor.constant import Constant -from common_func.file_manager import FileManager -from cluster_advice.cluster_advice_base import ClusterAdviceBase - - -class SlowLinkAdvice(ClusterAdviceBase): - RDMA_TIME_MS = "RDMA time(ms)" - RDMA_SIZE_MB = "RDMA size(mb)" - SDMA_TIME_MS = "SDMA time(ms)" - SDMA_SIZE_MB = "SDMA size(mb)" - RDMA_BANDWIDTH = "RDMA bandwidth(GB/s)" - SDMA_BANDWIDTH = "SDMA bandwidth(GB/s)" - COMMUNICATION_BANDWIDTH_INFO = "Communication Bandwidth Info" - TRANSIT_TIME = "Transit Time(ms)" - TRANSIT_SIZE = "Transit Size(MB)" - SDMA = "SDMA" - RDMA = "RDMA" - - def __init__(self, collection_path: str, kwargs: dict = None): - super().__init__(collection_path) - self.rank_bw_dict = defaultdict(lambda: { - self.RDMA_TIME_MS: 0, - self.RDMA_SIZE_MB: 0, - self.SDMA_TIME_MS: 0, - self.SDMA_SIZE_MB: 0, - }) - - @staticmethod - def compute_ratio(dividend: float, divisor: float): - if abs(divisor) < 1e-15: - return 0 - else: - return round(dividend / divisor, 4) - - def load_communication_json(self): - json_path = os.path.join(self.collection_path, Constant.CLUSTER_ANALYSIS_OUTPUT, Constant.CLUSTER_COMM_JSON) - if not os.path.exists(json_path): - msg = "[ERROR] cluster_communication.json doesn't exist, terminate analysis." - raise RuntimeError(msg) - communication_json = FileManager.read_json_file(json_path) - return communication_json - - def run(self): - self.path_check() - communication_json = self.load_communication_json() - self.process(communication_json) - self.output() - return self.output_format_data - - def process(self, communication_json: dict): - for comm_group, group_dict in communication_json.items(): - for step, step_dict in group_dict.items(): - for op, op_dict in step_dict.items(): - self.compute_bandwidth(op_dict) - if self.rank_bw_dict: - self.produce_bottleneck(self.RDMA_BANDWIDTH) - self.produce_bottleneck(self.SDMA_BANDWIDTH) - - def compute_bandwidth(self, op_dict: dict): - for rank_id, rank_dict in op_dict.items(): - try: - rank = int(rank_id) - except ValueError as e: - msg = "[ERROR] Cluster_communication.json has invalid structure." - raise ValueError(msg) from e - for comm_type, bw_dict in rank_dict.get(self.COMMUNICATION_BANDWIDTH_INFO, {}).items(): - if comm_type == self.SDMA: - self.rank_bw_dict[rank][self.SDMA_SIZE_MB] += bw_dict.get(self.TRANSIT_SIZE) - self.rank_bw_dict[rank][self.SDMA_TIME_MS] += bw_dict.get(self.TRANSIT_TIME) - if comm_type == self.RDMA: - self.rank_bw_dict[rank][self.RDMA_SIZE_MB] += bw_dict.get(self.TRANSIT_SIZE) - self.rank_bw_dict[rank][self.RDMA_TIME_MS] += bw_dict.get(self.TRANSIT_TIME) - - for rank, rank_dict in self.rank_bw_dict.items(): - self.rank_bw_dict[rank][self.RDMA_BANDWIDTH] = self.compute_ratio( - self.rank_bw_dict[rank][self.RDMA_SIZE_MB], self.rank_bw_dict[rank][self.RDMA_TIME_MS]) - self.rank_bw_dict[rank][self.SDMA_BANDWIDTH] = self.compute_ratio( - self.rank_bw_dict[rank][self.SDMA_SIZE_MB], self.rank_bw_dict[rank][self.SDMA_TIME_MS]) - - def produce_bottleneck(self, link_type: str): - data_list = [rank_dict.get(link_type, 0) for rank_id, rank_dict in self.rank_bw_dict.items()] - avg_bw = round(sum(data_list) / len(data_list), 3) - if avg_bw == 0: - return - self.bottelneck += f'{link_type}: \n' \ - f'The average is {avg_bw}, ' \ - f'while the maximum is {round(max(data_list), 3)}GB/s and ' \ - f'the minimum is {round(min(data_list), 3)}GB/s. ' \ - f'the difference is {round(max(data_list) - min(data_list), 3)}GB/s. 
\n' - - def output(self): - self.output_format_data[self.DATA] = self.rank_bw_dict - self.output_format_data[self.BOTTLENECK] = self.bottelneck diff --git a/profiler/advisor/advisor_backend/cluster_advice/slow_rank_advice.py b/profiler/advisor/advisor_backend/cluster_advice/slow_rank_advice.py deleted file mode 100644 index 4e789fb7fb..0000000000 --- a/profiler/advisor/advisor_backend/cluster_advice/slow_rank_advice.py +++ /dev/null @@ -1,71 +0,0 @@ -# Copyright (c) 2023, Huawei Technologies Co., Ltd. -# All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -import os -from collections import defaultdict -from common_func_advisor.constant import Constant -from common_func.file_manager import FileManager -from cluster_advice.cluster_advice_base import ClusterAdviceBase -from prof_bean_advisor.cluster_step_trace_time_bean import ClusterStepTraceTimeBean - - -class SlowRankAdvice(ClusterAdviceBase): - RANK = "rank" - RATIO_THRESHOLD = 0.05 - BOTTLENECK_LIST = ['Computing', 'Communication', "Free"] - - def __init__(self, collection_path: str, kwargs: dict = None): - super().__init__(collection_path) - - def load_step_time(self): - csv_path = os.path.join(self.collection_path, Constant.CLUSTER_ANALYSIS_OUTPUT, Constant.CLUSTER_STEP_TIME_CSV) - if not os.path.exists(csv_path): - msg = "[ERROR] cluster_step_trace_time.csv doesn't exist, terminate analysis." - raise RuntimeError(msg) - step_time = FileManager.read_csv_file(csv_path, ClusterStepTraceTimeBean) - return step_time - - def run(self): - self.path_check() - step_data = self.load_step_time() - step_dict = self.process(step_data) - self.output(step_dict) - return self.output_format_data - - def process(self, step_data: list): - step_dict = defaultdict(lambda: [0, 0, 0, 0]) - for step_bean in step_data: - if step_bean.type == self.RANK: - step_dict[step_bean.index][0] += step_bean.compute - step_dict[step_bean.index][1] += step_bean.communication - step_dict[step_bean.index][2] += step_bean.free - total_time_list = [sum(data_tuple) for rank_id, data_tuple in step_dict.items()] - if total_time_list: - mean_total_time = sum(total_time_list) / len(total_time_list) - for i in range(len(self.BOTTLENECK_LIST)): - self.produce_bottleneck(step_dict, i, mean_total_time) - return step_dict - - def produce_bottleneck(self, step_dict: dict, produce_type: int, mean_total_time: float): - data_list = [data_tuple[produce_type] for rank_id, data_tuple in step_dict.items()] - max_ratio = self.compute_max_gap_ratio(data_list, mean_total_time) - if max_ratio > self.RATIO_THRESHOLD: - self.bottelneck += f'{self.BOTTLENECK_LIST[produce_type]} has some issues in the cluster, ' \ - f'because the max difference of {self.BOTTLENECK_LIST[produce_type]} time ' \ - f'has reached {round(max_ratio * mean_total_time / 1000, 3)}ms. 
\n' - - def output(self, step_dict: dict): - self.output_format_data[self.DATA] = step_dict - self.output_format_data[self.BOTTLENECK] = self.bottelneck diff --git a/profiler/advisor/advisor_backend/common_func_advisor/__init__.py b/profiler/advisor/advisor_backend/common_func_advisor/__init__.py deleted file mode 100644 index 8400fd5ecd..0000000000 --- a/profiler/advisor/advisor_backend/common_func_advisor/__init__.py +++ /dev/null @@ -1,14 +0,0 @@ -# Copyright (c) 2023, Huawei Technologies Co., Ltd. -# All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. diff --git a/profiler/advisor/advisor_backend/common_func_advisor/constant.py b/profiler/advisor/advisor_backend/common_func_advisor/constant.py deleted file mode 100644 index 46a7fb24c2..0000000000 --- a/profiler/advisor/advisor_backend/common_func_advisor/constant.py +++ /dev/null @@ -1,225 +0,0 @@ -# Copyright (c) 2023, Huawei Technologies Co., Ltd. -# All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-from enum import Enum - - -class CsvTitle: - MODEL_NAME = "Model Name" - MODEL_ID = "Model ID" - TASK_ID = "Task ID" - STREAM_ID = "Stream ID" - INFER_ID = "Infer ID" - TASK_START_TIME = "Task Start Time(us)" - TASK_WAIT_TIME = "Task Wait Time(us)" - BLOCK_DIM = "Block Dim" - MIX_BLOCK_DIM = "Mix Block Dim" - HF32_ELIGIBLE = "HF32 Eligible" - INPUT_SHAPES = "Input Shapes" - INPUT_DATA_TYPES = "Input Data Types" - INPUT_FORMATS = "Input Formats" - OUTPUT_SHAPES = "Output Shapes" - OUTPUT_DATA_TYPES = "Output Data Types" - OUTPUT_FORMATS = "Output Formats" - CONTEXT_ID = "Context ID" - AICORE_TIME = "aicore_time(us)" - AIC_TOTAL_CYCLES = "aic_total_cycles" - AIC_MAC_TIME = "aic_mac_time(us)" - AIC_MAC_RATIO = "aic_mac_ratio" - AIC_SCALAR_TIME = "aic_scalar_time(us)" - AIC_SCALAR_RATIO = "aic_scalar_ratio" - AIC_MTE1_TIME = "aic_mte1_time(us)" - AIC_MTE1_RATIO = "aic_mte1_ratio" - AIC_MTE2_TIME = "aic_mte2_time(us)" - AIC_MTE2_RATIO = "aic_mte2_ratio" - AIC_FIXPIPE_TIME = "aic_fixpipe_time(us)" - AIC_FIXPIPE_RATIO = "aic_fixpipe_ratio" - AIC_ICACHE_MISS_RATE = "aic_icache_miss_rate" - AIV_TIME = "aiv_time(us)" - AIV_TOTAL_CYCLES = "aiv_total_cycles" - AIV_VEC_TIME = "aiv_vec_time(us)" - AIV_VEC_RATIO = "aiv_vec_ratio" - AIV_SCALAR_TIME = "aiv_scalar_time(us)" - AIV_SCALAR_RATIO = "aiv_scalar_ratio" - AIV_MTE2_TIME = "aiv_mte2_time(us)" - AIV_MTE2_RATIO = "aiv_mte2_ratio" - AIV_MTE3_TIME = "aiv_mte3_time(us)" - AIV_MTE3_RATIO = "aiv_mte3_ratio" - AIV_ICACHE_MISS_RATE = "aiv_icache_miss_rate" - CUBE_UTILIZATION = "cube_utilization( %)" - TASK_DURATION_SUM = "Task Duration Sum(us)" - TASK_DURATION_MEAN = "Task Duration Mean(us)" - TASK_DURATION_STD = "Task Duration Std(us)" - TASK_DURATION_RATIO = "Task Duration Ratio(100%)" - SIZE = "size(MB)" - THROUGHPUT = "throughput(GB/s)" - COLOR = "color" - GAP = "Gap(us)" - DURATION_SUM = "Duration Sum(us)" - COUNT = "Count" - MAX_DURATION = "Max Duration(us)" - MIN_DURATION = "Min Duration(us)" - AVG_DURATION = "Avg Duration(us)" - DURATION_RATIO = "Duration Ratio" - INDEX = "Index" - - -# 定义CSV_TITILE_V1类,继承自CSV_TITILE类, 适配旧版csv -class CsvTitleV1(CsvTitle): - OP_NAME = "Op Name" - OP_TYPE = "OP Type" - TASK_TYPE = "Task Type" - TASK_DURATION = "Task Duration(us)" - - -# 定义CSV_TITILE_V1类,继承自CSV_TITILE类, 适配新版csv -class CsvTitleV2(CsvTitle): - OP_NAME = "Name" - OP_TYPE = "Type" - TASK_TYPE = "Accelerator Core" - TASK_DURATION = "Duration(us)" - - -class Constant: - DTYPE_SIZE_MAP = {"int8": 1, "uint8": 1, - "int16": 2, "uint16": 2, - "int32": 4, "uint32": 4, - "int64": 8, "uint64": 8, - "float16": 2, - "bfloat16": 2, - "bf16": 2, - "dt_bf16": 2, - "float32": 4, - "float": 4, - "float64": 8, - "complex64": 8, - "complex128": 16, - "bool": 1} - TP_THRESHOLD = 1150 - MAX_INPUT_MODE_LEN = 30 - MAX_INPUT_ADVICE_LEN = 30 - SMALL_OP_DUR_RATIO = 0.2 - SMALL_OP_NUM_RATIO = 0.2 - BYTE_UNIT_TRANS = 1024 - UNIT_TRANS = 1000 - - # mode list - COMPUTE = "compute" - TIMELINE = "timeline" - CLUSTER = "cluster" - OVERALL = "overall" - PIPELINE = "pipeline" - - # advice list - SLOW_RANK = "slow rank" - SLOW_LINK = "slow link" - KERNEL = "kernel" - - # compute - NPU_FUSED = "npu_fused" - NPU_SLOW = "npu_slow" - - # timeline - OPTIM = "optimizer" - OP_SCHE = "op_schedule" - - # overall - SUMMARY = "summary" - - PT_PROF_SUFFIX = "ascend_pt" - ASCEND_PROFILER_OUTPUT = "ASCEND_PROFILER_OUTPUT" - COLLECTION_PATH = "collection_path" - CLUSTER_ANALYSIS_OUTPUT = "cluster_analysis_output" - KERNEL_DETAILS_CSV = "kernel_details.csv" - CLUSTER_STEP_TIME_CSV = 
"cluster_step_trace_time.csv" - CLUSTER_COMM_JSON = "cluster_communication.json" - - # pipline - OP_NAME = "name" - OP_TID = "tid" - PID = "pid" - TS = "ts" - DUR = "dur" - CAT = "cat" - ARGS = "args" - PH = "ph" - ID = "id" - PH_START = "s" - PH_BEGIN = "B" - PH_END = "E" - PH_META = "M" - PH_X = "X" - CNAME = "cname" - PROCESS_NAME = "process_name" - FRAMEWORK_NAME = "Python" - ASCEND_HARDWARE_NAME = "Ascend Hardware" - ASYNC_NPU = "async_npu" - STEP_PREFIX = "ProfilerStep#" - FP_ATEN_OP = "aten" - FP_C10D_OP = "c10d" - HCOM_OP_PREFIX = "hcom_" - BP_AUTOGRAD_OP = "autograd" - TRACE_VIEW_JSON = "trace_view.json" - - # pattern_dict key: pattern, value: pattern name - PATTERN_DICT = {("Add", "DropOutDoMask", "Add"): "bias_dropout_add", - ("BatchMatMul", "Mul", "Cast", "Mul", "MaskedFill", "SoftmaxV2", "Cast", "DropOutDoMask", - "AsStrided", "BatchMatMul", "Transpose"): "FA", - ("Transpose", "Transpose", "Transpose", "Mul", "Transpose", "BatchMatMulV2", "MaskedFill", - "Cast", "SoftmaxV2", "Cast", "DropOutDoMask", "BatchMatMulV2", "Transpose"): "FA", - ("Transpose", "BatchMatMulV2", "Transpose", "Transpose", "BatchMatMulV2", "ZerosLike", - "DropOutDoMask", "Cast", "SoftmaxGrad", "Cast", "MaskedFill", "BatchMatMulV2", - "BatchMatMulV2", "Mul"): "FA", - ("Cast", "Square", "ReduceMeanD", "Add", "Rsqrt", "Cast", "Cast", "Mul", "Cast", "Cast", - "Mul", "Cast"): "RMSNORM", - ("Cast", "LayerNorm", "Cast"): "LayerNorm", - ("Add", "LayerNorm"): "AddLayerNorm", - ("Add", "LayerNormV3"): "AddLayerNorm", - ("Gelu", "Add"): "GeluAdd", - ("Cast", "Square", "MemSet", "ReduceMean", "Add", "Rsqrt", "Mul", "Cast", "Mul"): "RMSNorm", - ("BatchMatMul", "RealDiv", "Add", "Maximum", "SoftmaxV2", "Cast", "BatchMatMul"): "FA", - ("BatchMatMulV2", "RealDiv", "Add", "Cast", "Maximum", "Cast", "SoftmaxV2", "AsStrided", - "BatchMatMulV2"): "FA", - ("BatchMatMulV2", "RealDiv", "Add", "Cast", "SoftmaxV2", "Cast", "BroadcastTo", - "BatchMatMulV2"): "FA", - ("Mul", "Slice", "Neg", "Slice", "ConcatD", "Cast", "Mul", "Add"): "RotaryMul", - ("Mul", "AsStrided", "Neg", "AsStrided", "ConcatD", "Mul", "Add"): "RotaryMul", - ("Mul", "Slice", "Neg", "Slice", "ConcatD", "Mul", "Add"): "RotaryMul", - ("MatMulV2", "Swish", "MatMulV2", "Mul", "MatMulV2"): "FFN", - ("Transpose", "Transpose", "GatherElement", "Transpose"): "GatherElement", - ("Slice", "Slice", "Swish", "Mul"): "torch_npu.npu_swiglu", - ("Cast", "Mul", "MaskedFill", "SoftmaxV2", "Cast"): "torch_npu.npu_scaled_masked_softmax", - ("Mul", "Slice", "Neg", "Slice", "ConcatD", "Mul"): "torch_npu.npu_rotary_mul", - ("Cast", "Square", "ReduceMeanD", "Add", "Rsqrt", "Mul", "Cast", "Mul"): "torch_npu.npu_rms_norm"} - TITLE = CsvTitleV2 - - @classmethod - def update_title(cls): - cls.TITLE = CsvTitleV1 - - -class CoreType: - AIV = "AI_VECTOR_CORE" - AIC = "AI_CORE" - AICPU = "AI_CPU" - MIX_AIV = "MIX_AIV" - MIX_AIC = "MIX_AIC" - HCCL = "HCCL" - - -class PerfColor(Enum): - WHITE = 0 - GREEN = 1 - YELLOW = 2 - RED = 3 diff --git a/profiler/advisor/advisor_backend/common_func_advisor/trace_view_json.py b/profiler/advisor/advisor_backend/common_func_advisor/trace_view_json.py deleted file mode 100644 index 8171f06ee2..0000000000 --- a/profiler/advisor/advisor_backend/common_func_advisor/trace_view_json.py +++ /dev/null @@ -1,209 +0,0 @@ -# Copyright (c) 2024, Huawei Technologies Co., Ltd. -# All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -import os -from abc import abstractmethod -from dataclasses import dataclass -from dataclasses import field -from typing import Dict -from typing import List - -import pandas as pd - -from common_func.file_manager import FileManager - - -@dataclass -class TraceObj: - ph: str = "" - bp: str = "" - cat: str = "" - name: str = "" - pid: int = 0 - tid: int = 0 - id: int = 0 - ts: str = "" - dur: float = 0.0 - args: dict = field(default='unknown') - - @abstractmethod - def hash(self): - raise Exception("To be implemented") - - def valid(self): - return self.name != "" - - def check_hashable(self): - if not self.valid(): - raise Exception("Illegal {} to hash".format(self.__class__.name)) - - -@dataclass -class Process(TraceObj): - def hash(self): - self.check_hashable() - # msprof 保证name唯一性 - return self.args.get("name") - - -@dataclass -class Thread(TraceObj): - def hash(self): - self.check_hashable() - # msprof 保证name唯一性 - return self.args.get("name") - - -@dataclass -class DurationEvent(TraceObj): - def hash(self): - self.check_hashable() - return self.ts - - -@dataclass -class FlowEvent(TraceObj): - s_point_ts: str = "" - e_point_ts: str = "" - - def hash(self): - self.check_hashable() - return self.e_point_ts - - -class TraceViewJson: - - def __init__(self, path): - self.processes: Dict[str, Process] = dict() - self.threads: Dict[str, Thread] = dict() - self.python_dur_events: Dict[str, DurationEvent] = dict() - self.cann_dur_events: Dict[str, DurationEvent] = dict() - self.ascend_hardware_dur_events: Dict[str, DurationEvent] = dict() - self.torch_2_npu_flow_events: Dict[str, FlowEvent] = dict() - traces = FileManager.read_json_file(path) - self._load_obj(traces) - - def get_call_stack(self, data: pd.DataFrame, index_id: int, ts_col: str) -> str: - if ts_col not in data.columns.tolist(): - print("[ERROR] No {} col found in data columns.".format(ts_col)) - return "" - row = data.loc[index_id] - timestamp = row[ts_col] - flow_event = self.get_torch_2_npu_flow_event(timestamp) - if not flow_event.valid(): - print("[ERROR] Get flow event failed for pattern {}.".format(row['pattern'])) - return "" - flow_event_s_key = flow_event.s_point_ts - python_dur_events = self.get_python_dur_events_contain_ts(flow_event_s_key) - if not python_dur_events: - print("[ERROR] No python dur event found for pattern {}.".format(row['pattern'])) - return "" - # 保持新老版本callstack兼容性 - if python_dur_events[0].args.get("Call stack"): - # 旧版本 - call_stack_list = python_dur_events[0].args.get("Call stack").split(";") - else: - python_dur_events.sort(key=lambda e: e.ts) - # 新版本 - call_stack_list = [event.name for event in python_dur_events if event.cat == "python_function"] - call_stack = "\n".join(call_stack_list) - return call_stack - - def get_torch_2_npu_flow_event(self, end_time) -> FlowEvent: - if not self.torch_2_npu_flow_events or not self.torch_2_npu_flow_events.get(end_time): - print("[ERROR] Find flow event failed for ts: {}".format(end_time)) - return FlowEvent() - return self.torch_2_npu_flow_events.get(end_time) - - def get_python_dur_events_contain_ts(self, ts) -> List[DurationEvent]: - res = [] - 
for event in self.python_dur_events.values(): - if float(event.ts) <= float(ts) <= float(event.ts) + event.dur: - res.append(event) - return res - - def _load_obj(self, traces): - self._load_format(traces) - if not self._check_format(): - print("[ERROR] parse json failed for error format") - return - self._load_duration_events(traces) - self._load_torch_to_npu_flow_events(traces) - - def _check_format(self): - # 当前功能只需要这两个process,可扩展 - check_processes = ['Python', 'Ascend Hardware'] - for check_process in check_processes: - if check_process in self.processes: - continue - print("[ERROR] {} process not found in json.".format(check_process)) - return False - return True - - # 加载pid, tid头 - def _load_format(self, traces: List[Dict]): - for i, trace in enumerate(traces): - if trace.get('name') == 'process_name': - if not trace.get('args') or not trace.get('args').get('name') or not trace.get('pid'): - continue - process = Process(**trace) - self.processes[process.hash()] = process - if trace.get('name') == 'thread_name': - if not trace.get('args') or not trace.get('args').get('name') or not trace.get('tid'): - continue - thread = Thread(**trace) - self.threads[thread.hash()] = thread - - def _load_duration_events(self, traces: List[Dict]): - def check_events(_trace): - return _trace.get('name') and _trace.get("ts") and _trace.get("dur") - - python_pid = self.processes.get("Python").pid - cann_pid = self.processes.get("CANN").pid - ascend_hardware_pid = self.processes.get("Ascend Hardware").pid - for i, trace in enumerate(traces): - if trace.get('ph') != 'X': - continue - if not check_events(trace): - continue - event = DurationEvent(**trace) - if trace.get('pid') == python_pid: - self.python_dur_events[event.hash()] = event - elif trace.get('pid') == cann_pid: - self.cann_dur_events[event.hash()] = event - elif trace.get("pid") == ascend_hardware_pid: - self.ascend_hardware_dur_events[event.hash()] = event - - def _load_torch_to_npu_flow_events(self, traces: List[Dict]): - def check_events(_trace): - return _trace.get('name') and _trace.get("id") and _trace.get("ts") - - flow_events_table_by_id = dict() - - python_pid = self.processes.get("Python") - for i, trace in enumerate(traces): - if trace.get('ph') != 's' and trace.get('ph') != 'f' and trace.get('pid') != python_pid: - continue - if not check_events(trace): - continue - event = flow_events_table_by_id.get(trace.get("id")) - if not event: - event = FlowEvent(**trace) - if trace.get('ph') == 's': - event.s_point_ts = trace.get('ts') - else: - event.e_point_ts = trace.get('ts') - flow_events_table_by_id[event.id] = event - - self.torch_2_npu_flow_events = {eve.hash(): eve for eve in flow_events_table_by_id.values()} diff --git a/profiler/advisor/advisor_backend/common_func_advisor/trace_view_preprocessor.py b/profiler/advisor/advisor_backend/common_func_advisor/trace_view_preprocessor.py deleted file mode 100644 index 7b9baa32d9..0000000000 --- a/profiler/advisor/advisor_backend/common_func_advisor/trace_view_preprocessor.py +++ /dev/null @@ -1,208 +0,0 @@ -# Copyright (c) 2023, Huawei Technologies Co., Ltd. -# All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -import re -import sys -from typing import Optional -from dataclasses import dataclass - -from common_func_advisor.constant import Constant - - -@dataclass -class FineTraceViewData: - py_pid: int = -1 - fp_tid: int = -1 - bp_tid: int = -1 - ascend_pid: int = -1 - min_ts: str = str(sys.maxsize) - max_ts: str = "0" - hcom_tids: list = None - fp_ops: list = None - bp_ops: list = None - hcom_ops: list = None - npu_ops_ts_dur: dict = None - torch_to_npu_links: list = None - - def __post_init__(self): - self.hcom_tids = self.hcom_tids or [] - self.fp_ops = self.fp_ops or [] - self.bp_ops = self.bp_ops or [] - self.hcom_ops = self.hcom_ops or [] - self.npu_ops_ts_dur = self.npu_ops_ts_dur or {} - self.torch_to_npu_links = self.torch_to_npu_links or [] - - def sort(self): - self.fp_ops.sort(key=lambda x: x[Constant.TS]) - self.bp_ops.sort(key=lambda x: x[Constant.TS]) - self.hcom_ops.sort(key=lambda x: x[Constant.TS]) - self.torch_to_npu_links.sort(key=lambda x: x[Constant.TS]) - - -class TraceViewPreProcessor: - """ - Trace view data preprocess - """ - - @staticmethod - def _is_fp_op(op_name: str) -> bool: - """ - check whether op is fp op - """ - return op_name.startswith(Constant.FP_ATEN_OP) or op_name.startswith(Constant.FP_C10D_OP) - - @staticmethod - def _is_fp_data(data: dict, fp_tid: int, py_pid: int) -> bool: - """ - check whether data is valid fp data - """ - return data[Constant.OP_TID] == fp_tid and \ - Constant.TS in data and Constant.DUR in data and \ - not data[Constant.OP_NAME].startswith(Constant.STEP_PREFIX) and \ - data[Constant.PID] == py_pid - - @staticmethod - def _is_bp_op(op_name: str) -> bool: - """ - check whether op is bp op - """ - return op_name.startswith(Constant.BP_AUTOGRAD_OP) - - @staticmethod - def _is_bp_data(data: dict, bp_tid: int, py_pid: int) -> bool: - """ - check whether data is valid bp data - """ - return data[Constant.OP_TID] == bp_tid and \ - Constant.TS in data and Constant.DUR in data and \ - data[Constant.PID] == py_pid - - @staticmethod - def _is_torch_to_npu_link(data: dict, fp_tid: int) -> bool: - """ - check whether data is torch to npu link - """ - return Constant.CAT in data and data[Constant.CAT] == Constant.ASYNC_NPU and \ - data[Constant.PH] == Constant.PH_START and \ - data[Constant.PID] == fp_tid - - @staticmethod - def _is_send_recv_op(op_name: str) -> bool: - """ - check whether op is hcom send or recv op - """ - # eg: hcom_BatchSendRecv__101_0_1 - p1 = re.compile(r'hcom_\w+SendRecv__\d+') - # eg: hcom_send__101_0_1 - p2 = re.compile(r'hcom_send__\d+') - # eg: hcom_receive__101_0_1 - p3 = re.compile(r'hcom_receive__\d+') - return bool(p1.match(op_name)) or bool(p2.match(op_name)) or bool(p3.match(op_name)) - - @staticmethod - def _is_hcom_op(op_name: str) -> bool: - """ - check whether data is hcom data - """ - return op_name.startswith(Constant.HCOM_OP_PREFIX) - - @staticmethod - def _is_python_process(data: dict) -> bool: - """ - check whether data is python process - """ - return Constant.PH in data and data[Constant.PH] == Constant.PH_META and \ - data[Constant.OP_NAME] == Constant.PROCESS_NAME and \ - 
data[Constant.ARGS][Constant.OP_NAME] == Constant.FRAMEWORK_NAME - - @staticmethod - def _is_step_op(data: dict) -> bool: - """ - check whether data is step data - """ - return data[Constant.OP_NAME].startswith(Constant.STEP_PREFIX) - - @staticmethod - def _is_ascend_process(data: dict) -> bool: - """ - check whether data is ascend process data - """ - return Constant.PH in data and data[Constant.PH] == Constant.PH_META and \ - data[Constant.OP_NAME] == Constant.PROCESS_NAME and \ - data[Constant.ARGS][Constant.OP_NAME] == Constant.ASCEND_HARDWARE_NAME - - @staticmethod - def _is_npu_op(data: dict, ascend_pid: int) -> bool: - """ - check whether data is npu op - """ - return Constant.PH in data and data[Constant.PH] == Constant.PH_X and \ - not data[Constant.OP_NAME].isupper() and \ - data[Constant.PID] == ascend_pid - - def process(self, raw_data: list) -> Optional[FineTraceViewData]: - """ - preprocess raw data - """ - if not raw_data: - print("[ERROR] No raw data found in trace view data.") - return None - - raw_fp_tids, raw_bp_tids, raw_hcom_tids = set(), set(), set() - fine_data = FineTraceViewData() - - # counting fp ops and bp ops tid and ascend pid - for data in raw_data: - if self._is_fp_op(data[Constant.OP_NAME]): - raw_fp_tids.add(data[Constant.OP_TID]) - elif self._is_bp_op(data[Constant.OP_NAME]): - raw_bp_tids.add(data[Constant.OP_TID]) - elif self._is_send_recv_op(data[Constant.OP_NAME]): - fine_data.hcom_ops.append(data) - raw_hcom_tids.add(data[Constant.OP_TID]) - elif self._is_python_process(data): - fine_data.py_pid = data[Constant.PID] - elif self._is_ascend_process(data): - fine_data.ascend_pid = data[Constant.PID] - - # find max and min ts in hcom ops - if self._is_hcom_op(data[Constant.OP_NAME]): - # for compatibility with old data (ts is float type) - ts = data[Constant.TS] if not isinstance(data[Constant.TS], float) else str(data[Constant.TS]) - fine_data.min_ts = min(fine_data.min_ts, ts) - fine_data.max_ts = max(fine_data.max_ts, ts) - - unique_fp_tid = list(raw_fp_tids - raw_bp_tids) - unique_bp_tid = list(raw_bp_tids) - fine_data.hcom_tids = list(raw_hcom_tids) - - if not unique_fp_tid or not unique_bp_tid: - print("[INFO] No fp or bp tid found in trace view data.") - else: - fine_data.fp_tid, fine_data.bp_tid = unique_fp_tid[0], unique_bp_tid[0] - - # filter fp ops and bp ops and torch_to_npu_links - for data in raw_data: - if self._is_fp_data(data, fine_data.fp_tid, fine_data.py_pid): - fine_data.fp_ops.append(data) - elif self._is_bp_data(data, fine_data.bp_tid, fine_data.py_pid): - fine_data.bp_ops.append(data) - elif self._is_torch_to_npu_link(data, fine_data.fp_tid): - fine_data.torch_to_npu_links.append(data) - elif self._is_npu_op(data, fine_data.ascend_pid): - fine_data.npu_ops_ts_dur[data[Constant.TS]] = data[Constant.DUR] - - fine_data.sort() - return fine_data diff --git a/profiler/advisor/advisor_backend/compute_advice/__init__.py b/profiler/advisor/advisor_backend/compute_advice/__init__.py deleted file mode 100644 index 8400fd5ecd..0000000000 --- a/profiler/advisor/advisor_backend/compute_advice/__init__.py +++ /dev/null @@ -1,14 +0,0 @@ -# Copyright (c) 2023, Huawei Technologies Co., Ltd. -# All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. diff --git a/profiler/advisor/advisor_backend/compute_advice/compute_advice_base.py b/profiler/advisor/advisor_backend/compute_advice/compute_advice_base.py deleted file mode 100644 index cafbafd8e2..0000000000 --- a/profiler/advisor/advisor_backend/compute_advice/compute_advice_base.py +++ /dev/null @@ -1,105 +0,0 @@ -# Copyright (c) 2023, Huawei Technologies Co., Ltd. -# All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -from abc import abstractmethod -from collections import defaultdict -import os - -from advice_base import AdviceBase -from common_func.file_manager import FileManager - - -class ComputeAdviceBase(AdviceBase): - ASCEND_PT = 'ascend_pt' - ASCEND_PROFILER_OUTPUT = 'ASCEND_PROFILER_OUTPUT' - KERNEL_DETAIL_FILE = "kernel_details.csv" - TRACE_VIEW_FILE = "trace_view.json" - - def __init__(self, collection_path: str): - super().__init__(collection_path) - self.kernel_details_path = "" - self.has_preparse = False - self.preparse_data = defaultdict(list) - self.call_stack = None - self.trace_view_path = "" - - def path_check(self): - """ - check whether input path is valid - """ - if not os.path.exists(self.collection_path): - print("[ERROR] Path: {} is not exist.".format(self.collection_path)) - return False - if os.path.isdir(self.collection_path) and self.collection_path.endswith("ascend_pt"): - self.kernel_details_path = os.path.join(self.collection_path, "ASCEND_PROFILER_OUTPUT", - "kernel_details.csv") - if not os.path.exists(self.kernel_details_path): - print("[ERROR] kernel_details.csv is not exist in the Path: {}.".format( - os.path.join(self.collection_path, "ASCEND_PROFILER_OUTPUT"))) - return False - elif os.path.isfile(self.collection_path) and os.path.basename(self.collection_path) == "kernel_details.csv": - self.kernel_details_path = self.collection_path - else: - print("[ERROR] Please input ascend_pt or kernel_details.csv") - return False - print("[INFO] Start to analyse the target file: {}".format(self.kernel_details_path)) - self.preparse() - return True - - def has_callstack(self): - if self.call_stack is not None: - return self.call_stack - profiler_info_json_path = "" - for file in os.listdir(self.collection_path): - if file.startswith("profiler_info"): - profiler_info_json_path = os.path.join(self.collection_path, file) - break - if not profiler_info_json_path: - self.call_stack = False - return self.call_stack - self.trace_view_path = os.path.join(self.collection_path, self.ASCEND_PROFILER_OUTPUT, "trace_view.json") - if not os.path.exists(profiler_info_json_path) or not os.path.exists(self.trace_view_path): - self.call_stack = 
False - return self.call_stack - info = FileManager.read_json_file(profiler_info_json_path) - if not info.get("config") or not info.get("config").get("common_config") \ - or not info.get("config").get("common_config").get("with_stack"): - self.call_stack = False - return self.call_stack - activities = info.get("config").get("common_config").get("activities") - if not activities or "ProfilerActivity.CPU" not in activities: - self.call_stack = False - return self.call_stack - self.call_stack = info.get("config").get("common_config").get("with_stack") - return self.call_stack - - @abstractmethod - def run(self): - """ - analyze profiling data and advice - """ - - @abstractmethod - def output(self): - """ - output relevant data - """ - self.output_format_data[self.DATA] = self.cur_data - self.output_format_data[self.BOTTLENECK] = self.cur_bottleneck - self.output_format_data[self.ADVICE] = self.cur_advice - - def preparse(self): - if self.has_preparse: - return diff --git a/profiler/advisor/advisor_backend/compute_advice/npu_fused/__init__.py b/profiler/advisor/advisor_backend/compute_advice/npu_fused/__init__.py deleted file mode 100644 index 8400fd5ecd..0000000000 --- a/profiler/advisor/advisor_backend/compute_advice/npu_fused/__init__.py +++ /dev/null @@ -1,14 +0,0 @@ -# Copyright (c) 2023, Huawei Technologies Co., Ltd. -# All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. diff --git a/profiler/advisor/advisor_backend/compute_advice/npu_fused/csv_analyzer.py b/profiler/advisor/advisor_backend/compute_advice/npu_fused/csv_analyzer.py deleted file mode 100644 index c85c14d618..0000000000 --- a/profiler/advisor/advisor_backend/compute_advice/npu_fused/csv_analyzer.py +++ /dev/null @@ -1,81 +0,0 @@ -# Copyright (c) 2023, Huawei Technologies Co., Ltd. -# All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
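For reference, the with_stack gate applied by has_callstack() in compute_advice_base.py above reduces to a few dictionary lookups on profiler_info*.json. A minimal sketch under that assumption; toy_has_callstack and its argument are illustrative names, not part of the original code:

import json

def toy_has_callstack(profiler_info_path: str) -> bool:
    # Call-stack based advice is only possible when the trace was collected
    # with with_stack=True and CPU activities enabled.
    with open(profiler_info_path, "r") as f:
        info = json.load(f)
    common_config = info.get("config", {}).get("common_config", {})
    activities = common_config.get("activities") or []
    return bool(common_config.get("with_stack")) and "ProfilerActivity.CPU" in activities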
-
-import multiprocessing
-
-import pandas as pd
-import numpy as np
-
-from common_func_advisor.constant import Constant
-from .op_perf import OpPerfFactory
-
-
-class CSVAnalyzer:
-    def __init__(self, path) -> None:
-        self._path = path
-
-    def process(self):
-        df = pd.read_csv(self._path, dtype={"Start Time(us)": str})
-        # Check whether any fusable operator patterns exist
-        op_type_list = df["Type"].tolist()
-        duration_list = df["Duration(us)"].tolist()
-        start_times = df["Start Time(us)"].tolist()
-        # Strip the trailing \t separator
-        start_times = [start_time[:-1] for start_time in start_times]
-        result_list = []
-        for pattern in Constant.PATTERN_DICT.keys():
-            result_list.extend(self.find_all_sub_lists(op_type_list, duration_list, start_times, pattern))
-        data_frame = pd.DataFrame(result_list)
-        data_frame.columns = ["pattern_name", "pattern", "len", "count", "duration sum(us)", "op durations(us)",
-                              "index", "first_timestamp"]
-        return data_frame
-
-    @staticmethod
-    def find_all_sub_lists(op_type_list, duration_list, start_times, expect_sub_list):
-        # Use a dict to record each matched sub-list together with its occurrence count and start positions
-        len_sub_list = len(expect_sub_list)
-        expect_sub_list = tuple(expect_sub_list)
-        sublist_dict = {}
-        # Walk the list, taking a sub-list of length N from every position
-        for i in range(len(op_type_list) - len_sub_list + 1):
-            sublist = tuple(op_type_list[i:i + len_sub_list])
-            if sublist != expect_sub_list:
-                continue
-            # If the sub-list is already in the dict, increase its count; otherwise initialize it to 1
-            if sublist in sublist_dict:
-                # count
-                sublist_dict[sublist][0] += 1
-                # index
-                sublist_dict[sublist][1].append(i)
-                # total duration
-                sublist_dict[sublist][2] += sum(duration_list[i:i + len_sub_list])
-                # duration
-                zip_data = zip(sublist_dict[sublist][3], duration_list[i:i + len_sub_list])
-                sublist_dict[sublist][3] = [a + b for a, b in zip_data]
-            else:
-                sublist_dict[sublist] = [1, [i], sum(duration_list[i:i + len_sub_list]),
-                                         duration_list[i:i + len_sub_list], len_sub_list, start_times[i]]
-        # Collect all repeated sub-lists
-        repeated_sublists = []
-        for sublist, (count, index, duration_sum, op_durations, sublist_len, first_time) in sublist_dict.items():
-            pattern_name = Constant.PATTERN_DICT.get(sublist, "unknown")
-            op_durations = [round(num, 2) for num in op_durations]
-            repeated_sublists.append([pattern_name, sublist, sublist_len, count,
-                                      duration_sum, op_durations, index, first_time])
-        if len(sublist_dict) == 0:
-            pattern_name = Constant.PATTERN_DICT.get(expect_sub_list, "unknown")
-            repeated_sublists.append([pattern_name, expect_sub_list, 0, 0, 0, 0, 0, 0])
-        # Return all repeated sub-lists
-        return repeated_sublists
diff --git a/profiler/advisor/advisor_backend/compute_advice/npu_fused/json_analyzer.py b/profiler/advisor/advisor_backend/compute_advice/npu_fused/json_analyzer.py
deleted file mode 100644
index fd2a72ffa3..0000000000
--- a/profiler/advisor/advisor_backend/compute_advice/npu_fused/json_analyzer.py
+++ /dev/null
@@ -1,55 +0,0 @@
-# Copyright (c) 2024, Huawei Technologies Co., Ltd.
-# All rights reserved.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
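The comments in CSVAnalyzer.find_all_sub_lists above describe a plain sliding-window scan over the operator-type column. A self-contained toy version of that scan; the operator names and durations below are invented for illustration:

# Count where a known fusable pattern occurs in an operator-type sequence
# and sum the durations of each occurrence.
op_types = ["Add", "LayerNorm", "MatMulV2", "Add", "LayerNorm"]
durations = [2.0, 5.0, 30.0, 2.5, 5.5]
pattern = ("Add", "LayerNorm")   # maps to "AddLayerNorm" in PATTERN_DICT

hits = []
for i in range(len(op_types) - len(pattern) + 1):
    if tuple(op_types[i:i + len(pattern)]) == pattern:
        hits.append((i, sum(durations[i:i + len(pattern)])))

print(hits)  # [(0, 7.0), (3, 8.0)] -> two fusable occurrences and their summed durations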
-
-import pandas as pd
-
-from common_func_advisor.trace_view_json import TraceViewJson
-
-
-class JSONAnalyzer(object):
-    def __init__(self, path):
-        self._path = path
-
-    def get_custom_code(self, data: pd.DataFrame, ts_col: str, output_col: str):
-        trace_json = TraceViewJson(self._path)
-        callstacks = pd.DataFrame(columns=[output_col])
-
-        for i, row in data.iterrows():
-            if ts_col not in data.columns.tolist():
-                print("[ERROR] No {} col found in data columns.".format(ts_col))
-                return callstacks
-            timestamp = row[ts_col]
-            flow_event = trace_json.get_torch_2_npu_flow_event(timestamp)
-            if not flow_event.valid():
-                print("[ERROR] Get flow event failed for pattern {}.".format(row['pattern']))
-                callstacks.loc[i] = ""
-                continue
-            flow_event_s_key = flow_event.s_point_ts
-            python_dur_events = trace_json.get_python_dur_events_contain_ts(flow_event_s_key)
-            if not python_dur_events:
-                print("[ERROR] No python dur event found for pattern {}.".format(row['pattern']))
-                callstacks.loc[i] = ""
-                continue
-            # Keep call stack compatibility between old and new trace formats
-            if python_dur_events[0].args.get("Call stack"):
-                # Old format
-                callstack = python_dur_events[0].args.get("Call stack").split(";")
-            else:
-                python_dur_events.sort(key=lambda e: e.ts)
-                # New format
-                callstack = [event.name for event in python_dur_events if event.cat == "python_function"]
-            callstack_str = "\n".join(callstack)
-            callstacks.loc[i] = callstack_str
-        return callstacks
diff --git a/profiler/advisor/advisor_backend/compute_advice/npu_fused/op_perf.py b/profiler/advisor/advisor_backend/compute_advice/npu_fused/op_perf.py
deleted file mode 100644
index 7bcbed5a75..0000000000
--- a/profiler/advisor/advisor_backend/compute_advice/npu_fused/op_perf.py
+++ /dev/null
@@ -1,196 +0,0 @@
-# Copyright (c) 2023, Huawei Technologies Co., Ltd.
-# All rights reserved.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
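JSONAnalyzer above leans on TraceViewJson, whose core idea is that async_npu flow events pair a host-side start ("s") with a device-side finish ("f"), so indexing the starts by the finish timestamp lets a kernel time be walked back to host events. A minimal sketch of that pairing, using a made-up two-event trace:

events = [
    {"ph": "s", "cat": "async_npu", "id": 1, "ts": "100.0"},
    {"ph": "f", "cat": "async_npu", "id": 1, "ts": "250.0"},
]

flows = {}
for ev in events:
    if ev.get("cat") != "async_npu":
        continue
    flow = flows.setdefault(ev["id"], {})
    flow["s" if ev["ph"] == "s" else "e"] = ev["ts"]

# Index by device-side (end) timestamp, mirroring torch_2_npu_flow_events above.
by_end_ts = {flow["e"]: flow["s"] for flow in flows.values() if "s" in flow and "e" in flow}
print(by_end_ts.get("250.0"))  # "100.0": the host timestamp whose call stack explains the kernel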
-import functools -from typing import Dict - -from common_func_advisor.constant import Constant -from common_func_advisor.constant import CoreType -from common_func_advisor.constant import PerfColor - - -class OpPerfFactory: - @classmethod - def build(cls, op_row: Dict): - if op_row.get(Constant.TITLE.TASK_TYPE) == CoreType.AIV: - return VecOpPerf(op_row) - elif op_row.get(Constant.TITLE.TASK_TYPE) == CoreType.AIC: - return CubeOpPerf(op_row) - else: - return OpPerf(op_row) - - -class OpPerf: - def __init__(self, op_row: Dict): - if "OP Type" in op_row.keys(): - Constant.update_title() - self.row = op_row - self.model_name = op_row.get("Model Name") - self.model_id = op_row.get("Model ID") - self.task_id = op_row.get("Task ID") - self.stream_id = op_row.get("Stream ID") - self.infer_id = op_row.get("Infer ID") - self.op_name = op_row.get("Name") - self.op_type = op_row.get("Type") - self.task_type = op_row.get("Accelerator Core") - self.task_start_time = op_row.get("Start Time(us)") - self.task_duration = op_row.get("Duration(us)") - self.task_wait_time = op_row.get("Wait Time(us)") - self.block_dim = op_row.get("Block Dim") - self.mix_block_dim = op_row.get("Mix Block Dim") - - self.hf32_eligible = op_row.get("HF32 Eligible") - self.input_shapes = op_row.get("Input Shapes") - self.input_data_types = op_row.get("Input Data Types") - self.input_formats = op_row.get("Input Formats") - self.output_shapes = op_row.get("Output Shapes") - self.output_data_types = op_row.get("Output Data Types") - self.output_formats = op_row.get("Output Formats") - self.context_id = op_row.get("Context ID") - self.aicore_time = op_row.get("aicore_time(us)") - self.aic_total_cycles = op_row.get("aic_total_cycles") - - self.aic_mac_time = op_row.get("aic_mac_time(us)") - self.aic_mac_ratio = op_row.get("aic_mac_ratio") - self.aic_scalar_time = op_row.get("aic_scalar_time(us)") - self.aic_scalar_ratio = op_row.get("aic_scalar_ratio") - self.aic_mte1_time = op_row.get("aic_mte1_time(us)") - self.aic_mte1_ratio = op_row.get("aic_mte1_ratio") - self.aic_mte2_time = op_row.get("aic_mte2_time(us)") - self.aic_mte2_ratio = op_row.get("aic_mte2_ratio") - self.aic_fixpipe_time = op_row.get("aic_fixpipe_time(us)") - self.aic_fixpipe_ratio = op_row.get("aic_fixpipe_ratio") - self.aic_icache_miss_rate = op_row.get("aic_icache_miss_rate") - self.aiv_time = op_row.get("aiv_time(us)") - self.aiv_total_cycles = op_row.get("aiv_total_cycles") - self.aiv_vec_time = op_row.get("aiv_vec_time(us)") - self.aiv_vec_ratio = op_row.get("aiv_vec_ratio") - self.aiv_scalar_time = op_row.get("aiv_scalar_time(us)") - self.aiv_scalar_ratio = op_row.get("aiv_scalar_ratio") - self.aiv_mte2_time = op_row.get("aiv_mte2_time(us)") - - self.aiv_mte2_ratio = op_row.get("aiv_mte2_ratio") - self.aiv_mte3_time = op_row.get("aiv_mte3_time(us)") - self.aiv_mte3_ratio = op_row.get("aiv_mte3_ratio") - self.aiv_icache_miss_rate = op_row.get("aiv_icache_miss_rate") - self.cube_utilization = op_row.get("cube_utilization( %)") - - @staticmethod - def get_dtype_size(dtype_str: str): - return Constant.DTYPE_SIZE_MAP.get(dtype_str.lower(), 0) - - @staticmethod - def get_element_count(shape: list): - return functools.reduce(lambda x, y: int(x) * int(y), shape) - - @staticmethod - def shape_to_tuple(shape_str: str) -> tuple: - if not isinstance(shape_str, str): - return [] - shape_str = shape_str.strip('"') - split_shape = shape_str.strip(';') - if not split_shape: - return [] - pairs = split_shape.split(';') - shape_result = [] - for pair in pairs: - pair = 
pair.strip(";") - elements = pair.split(',') - elements = tuple(int(element) if "" != element else 0 for element in elements) - shape_result.append(elements) - return tuple(shape_result) - - @staticmethod - def dtype_to_tuple(dtypes_str: str) -> tuple: - if not isinstance(dtypes_str, str): - return [] - dtypes_str = dtypes_str.strip('"') - split_dtypes = dtypes_str.strip(';') - if not split_dtypes: - return [] - pairs = split_dtypes.split(';') - return tuple(pairs) - - def get_mac_ratio(self): - return self.aic_mac_ratio - - def get_size(self, shapes_str, dtypes_str): - shapes = self.shape_to_tuple(shapes_str) - dtypes = self.dtype_to_tuple(dtypes_str) - if len(shapes) > len(dtypes): - print(f"[ERROR] The size of shape is greater than that of dtypes.") - return 0 - if len(shapes) < len(dtypes): - shapes = list(shapes) - shapes.extend([(1,)] * (len(dtypes) - len(shapes))) - all_size = 0 - for index, shape in enumerate(shapes): - element_count = self.get_element_count(shape) - dtype_size = self.get_dtype_size(dtypes[index]) - all_size += element_count * dtype_size - return all_size - - def get_calc_size(self): - # input and output bytes (MB) - if not self.input_shapes or not self.output_shapes: - print("[ERROR] There is no tensor data, do not assess vector op performance.") - return 0 - intput_size = self.get_size(self.input_shapes, self.input_data_types) - output_size = self.get_size(self.output_shapes, self.output_data_types) - return (intput_size + output_size) / (Constant.BYTE_UNIT_TRANS * Constant.BYTE_UNIT_TRANS) - - def get_throughput(self): - # throughput(GB/s) - if not self.task_duration or abs(self.task_duration) < 1e-6: - print("[ERROR] There is no task_duration, do not assess vector op performance.") - return 0 - return self.row[Constant.TITLE.SIZE] / Constant.BYTE_UNIT_TRANS / self.task_duration * Constant.UNIT_TRANS * Constant.UNIT_TRANS - - def get_perf_color(self): - return PerfColor.WHITE - - def update(self): - self.row[Constant.TITLE.SIZE] = self.get_calc_size() - self.row[Constant.TITLE.THROUGHPUT] = self.get_throughput() - self.row[Constant.TITLE.COLOR] = self.get_perf_color().name - return self.row - - -class VecOpPerf(OpPerf): - def get_perf_color(self) -> PerfColor: - throughput = self.row[Constant.TITLE.THROUGHPUT] - op_duration = self.task_duration - tp_threshold = Constant.TP_THRESHOLD - if throughput == 0: - return PerfColor.WHITE - if throughput < tp_threshold / 2 and op_duration > 20: - return PerfColor.RED - elif tp_threshold / 2 <= throughput < tp_threshold: - return PerfColor.YELLOW - else: - return PerfColor.GREEN - - -class CubeOpPerf(OpPerf): - def get_perf_color(self) -> PerfColor: - aic_mac_ratio = self.get_mac_ratio() - if not aic_mac_ratio: - print("[WARNING] There is no aic_mac_ratio, do not assess cube op performance.") - return PerfColor.WHITE - elif aic_mac_ratio < 0.6: - return PerfColor.RED - elif 0.6 <= aic_mac_ratio < 0.8: - return PerfColor.YELLOW - else: - return PerfColor.GREEN diff --git a/profiler/advisor/advisor_backend/compute_advice/npu_fused_advice.py b/profiler/advisor/advisor_backend/compute_advice/npu_fused_advice.py deleted file mode 100644 index fd5610bbbb..0000000000 --- a/profiler/advisor/advisor_backend/compute_advice/npu_fused_advice.py +++ /dev/null @@ -1,71 +0,0 @@ -# Copyright (c) 2023, Huawei Technologies Co., Ltd. -# All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -import os -from abc import ABC - -import pandas as pd - -from compute_advice.compute_advice_base import ComputeAdviceBase -from compute_advice.npu_fused.csv_analyzer import CSVAnalyzer -from compute_advice.npu_fused.json_analyzer import JSONAnalyzer - - -class NpuFusedAdvice(ComputeAdviceBase, ABC): - - def __init__(self, collection_path: str): - super().__init__(collection_path) - self.cur_data = dict() - self.cur_bottleneck = str() - self.cur_advice = str() - self.kernel_details_path = "" - self.call_stack = None - - def run(self): - if not self.path_check(): - return self.output_format_data - self.process() - self.output() - return self.output_format_data - - def process(self): - csv_analyzer = CSVAnalyzer(self.kernel_details_path) - all_pattern_data = csv_analyzer.process() - all_pattern_data = all_pattern_data.sort_values(by='duration sum(us)', ascending=False) - filter_data = all_pattern_data.get(all_pattern_data.get("duration sum(us)", 0) > 0) - if not self.has_callstack(): - print("[Warning] No call stack info found, advice will be incomplete") - self.cur_data = filter_data - else: - json_analyzer = JSONAnalyzer(self.trace_view_path) - custom_code = json_analyzer.get_custom_code(filter_data, "first_timestamp", "custom code") - self.cur_data = pd.concat([filter_data, custom_code], axis=1) - op_num = len(self.cur_data.index) - op_dur = filter_data["duration sum(us)"].sum() - if op_num > 0: - index = 0 - self.cur_bottleneck = f"The computing time of fusable op is {round(op_dur, 2)} ms." - self.cur_advice = "" - for _, row in self.cur_data.iterrows(): - advice = f"Advice {index}:\n" - cur_op = "[" + ", ".join(row.loc["pattern"]) + "]" - npu_fused_op = row.loc["pattern_name"] - advice += f"Replace {cur_op} with {npu_fused_op}. " - if self.call_stack: - advice += f"This pattern first happened in: \n{row['custom code']}" - if index != op_num - 1: - advice += "\n" - index += 1 - self.cur_advice += advice diff --git a/profiler/advisor/advisor_backend/compute_advice/npu_slow_advice.py b/profiler/advisor/advisor_backend/compute_advice/npu_slow_advice.py deleted file mode 100644 index caff1c792c..0000000000 --- a/profiler/advisor/advisor_backend/compute_advice/npu_slow_advice.py +++ /dev/null @@ -1,82 +0,0 @@ -# Copyright (c) 2023, Huawei Technologies Co., Ltd. -# All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
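The vector-op rating in op_perf.py above combines a simple throughput formula with fixed thresholds. A worked example with invented size and duration values, assuming the same constants as the deleted code (TP_THRESHOLD = 1150, 1024 for byte units, 1000 for time units):

size_mb = 5.0        # total input + output bytes, in MB
duration_us = 80.0   # task duration, in us

# throughput(GB/s) = size(MB) / 1024 / duration(us) * 1000 * 1000
throughput_gbs = size_mb / 1024 / duration_us * 1000 * 1000

tp_threshold = 1150
if throughput_gbs == 0:
    color = "WHITE"
elif throughput_gbs < tp_threshold / 2 and duration_us > 20:
    color = "RED"      # slow and long enough to matter
elif tp_threshold / 2 <= throughput_gbs < tp_threshold:
    color = "YELLOW"
else:
    color = "GREEN"

print(round(throughput_gbs, 2), color)  # 61.04 RED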
-from abc import ABC -import multiprocessing - -import pandas as pd - -from compute_advice.compute_advice_base import ComputeAdviceBase -from compute_advice.npu_fused.op_perf import OpPerfFactory -from common_func_advisor.constant import Constant -from common_func_advisor.constant import PerfColor -from advisor_backend.common_func_advisor.trace_view_json import TraceViewJson - - -class NpuSlowAdvice(ComputeAdviceBase, ABC): - OP_PERF_SHEET = "op_perf" - - def __init__(self, collection_path: str): - super().__init__(collection_path) - self.kernel_details_path = "" - self.data = pd.DataFrame() - - @staticmethod - def save_to_excel(data: pd.DataFrame, file_path: str) -> None: - writer = pd.ExcelWriter(file_path, engine="xlsxwriter", mode="w") - data.index.name = Constant.TITLE.INDEX - data.to_excel(writer, index=True, sheet_name=NpuSlowAdvice.OP_PERF_SHEET) - NpuSlowAdvice.color_sheet(data, writer.book, writer.sheets[NpuSlowAdvice.OP_PERF_SHEET]) - writer.sheets[NpuSlowAdvice.OP_PERF_SHEET].freeze_panes = "A2" - writer.close() - - @staticmethod - def color_sheet(data: pd.DataFrame, workbook, worksheet): - color_rgb = { - PerfColor.GREEN.name: workbook.add_format({'bg_color': '#C6EFCE'}), - PerfColor.YELLOW.name: workbook.add_format({'bg_color': '#FFEB9C'}), - PerfColor.RED.name: workbook.add_format({'bg_color': '#FFC7CE'}), - } - for row in data.iterrows(): - color = row[1][Constant.TITLE.COLOR] - fill_format = color_rgb.get(color) - if not fill_format: - continue - worksheet.set_row(row[0] + 1, None, fill_format) - - @staticmethod - def update_op_row(row: tuple): - return OpPerfFactory.build(row[1]).update() - - def get_call_stack(self, data: pd.DataFrame, index_id: int, ts_col: str) -> str: - if not self.has_callstack(): - print("There is no call stack info, please set 'with_stack=True'") - return "" - trace_json = TraceViewJson(self.trace_view_path) - return trace_json.get_call_stack(data, index_id, ts_col) - - def run(self): - if not self.path_check(): - return self.data - self.process() - return self.data - - def process(self): - self.data = pd.read_csv(self.kernel_details_path, dtype={"Start Time(us)": str}) - # 去除末尾的\t分隔符 - self.data["Start Time(us)"] = self.data["Start Time(us)"].apply(lambda x: x[:-1]) - pool = multiprocessing.Pool(multiprocessing.cpu_count()) - result = pool.map(self.update_op_row, self.data.iterrows()) - pool.close() - self.data = pd.DataFrame(result) diff --git a/profiler/advisor/advisor_backend/interface.py b/profiler/advisor/advisor_backend/interface.py deleted file mode 100644 index 3e20c26d4d..0000000000 --- a/profiler/advisor/advisor_backend/interface.py +++ /dev/null @@ -1,62 +0,0 @@ -# Copyright (c) 2023, Huawei Technologies Co., Ltd. -# All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
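NpuSlowAdvice.process above rates every kernel row independently, which is why the work fans out over a process pool. A minimal sketch of that pattern; rate_row and its toy threshold stand in for OpPerfFactory.build(row).update() and are illustrative only:

import multiprocessing

import pandas as pd

def rate_row(indexed_row):
    _, row = indexed_row            # DataFrame.iterrows() yields (index, Series)
    row = row.copy()
    row["color"] = "RED" if row["Duration(us)"] > 100 else "GREEN"   # toy rating rule
    return row

if __name__ == "__main__":
    df = pd.DataFrame({"Name": ["MatMul", "Add"], "Duration(us)": [250.0, 12.0]})
    with multiprocessing.Pool(2) as pool:
        rated = pd.DataFrame(pool.map(rate_row, df.iterrows()))
    print(rated["color"].tolist())  # ['RED', 'GREEN']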
-import os -import sys - -sys.path.append( - os.path.join(os.path.dirname(os.path.dirname(os.path.abspath(__file__))), "advisor_backend")) -sys.path.append( - os.path.join(os.path.dirname(os.path.dirname(os.path.dirname(os.path.abspath(__file__)))), "compare_tools")) -sys.path.append( - os.path.join(os.path.dirname(os.path.dirname(os.path.dirname(os.path.abspath(__file__)))), "cluster_analyse")) -from common_func_advisor.constant import Constant -from advisor_backend.advice_factory.cluster_advice_factory import ClusterAdviceFactory -from advisor_backend.advice_factory.compute_advice_factory import ComputeAdviceFactory -from advisor_backend.advice_factory.timeline_advice_factory import TimelineAdviceFactory -from advisor_backend.advice_factory.overall_advice_factory import OverallAdviceFactory - - -class Interface: - def __init__(self, collection_path: str): - self.collection_path = os.path.realpath(collection_path) - self._factory_controller = FactoryController(collection_path) - - def get_data(self: any, mode: str, advice: str, **kwargs): - if len(mode) > Constant.MAX_INPUT_MODE_LEN or len(advice) > Constant.MAX_INPUT_ADVICE_LEN: - msg = '[ERROR]Input Mode is illegal.' - raise RuntimeError(msg) - factory = self._factory_controller.create_advice_factory(mode, kwargs.get("input_path", "")) - return factory.produce_advice(advice, kwargs) - - -class FactoryController: - FACTORY_LIB = { - Constant.CLUSTER: ClusterAdviceFactory, - Constant.COMPUTE: ComputeAdviceFactory, - Constant.TIMELINE: TimelineAdviceFactory, - Constant.OVERALL: OverallAdviceFactory - } - - def __init__(self, collection_path: str): - self.collection_path = os.path.realpath(collection_path) - self.temp_input_path = None - - def create_advice_factory(self, mode: str, input_path: str): - collection_path = input_path if input_path else self.collection_path - return self.FACTORY_LIB.get(mode)(collection_path) - - -if __name__ == "__main__": - Interface() diff --git a/profiler/advisor/advisor_backend/overall_advice/overall_summary_advice.py b/profiler/advisor/advisor_backend/overall_advice/overall_summary_advice.py deleted file mode 100644 index 9fb347d823..0000000000 --- a/profiler/advisor/advisor_backend/overall_advice/overall_summary_advice.py +++ /dev/null @@ -1,174 +0,0 @@ -# Copyright (c) 2024, Huawei Technologies Co., Ltd. -# All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -import os - -from advisor_backend.advice_base import AdviceBase -from compare_backend.utils.constant import Constant -from compare_interface.comparison_interface import ComparisonInterface - - -class OverallSummaryAdvice(AdviceBase): - advice_map = { - "Computing Time": "if you want more detailed advice please go to compute_perf_analysis.ipynb.", - "Uncovered Communication Time": "if you want more detailed advice please go to cluster_perf_analysis.ipynb.", - "Free Time": "if you want more detailed advice please go to timeline_perf_analysis.ipynb." 
- } - time_name_map = { - "Computing Time": "computing", - "Uncovered Communication Time(Wait Time)": "communication", - "Free Time": "free", - 'Cube Time(Num)': 'Cube Time', - 'Vector Time(Num)': 'Vector Time', - 'Flash Attention Time(Forward)(Num)': 'Flash Attention Time(Forward)', - 'Flash Attention Time(Backward)(Num)': 'Flash Attention Time(Backward)', - 'Other Time': "Other Computing Time", - 'SDMA Time(Num)': 'SDMA Time' - } - performance_time_dict = { - "Computing Time": ['Cube Time(Num)', 'Vector Time(Num)', 'Flash Attention Time(Forward)(Num)', - 'Flash Attention Time(Backward)(Num)', 'Other Time'], - "Uncovered Communication Time(Wait Time)": [], - "Free Time": ['SDMA Time(Num)'] - } - - def __init__(self, collection_path: str, kwargs: dict): - super().__init__(collection_path) - self.base_collection_path = kwargs.get("base_collection_path", "") - self._has_base_collection = False - self._is_minimal_profiling = False - self.cur_data = {} - self.cur_bottleneck = {} - self.cur_advices = "" - self._headers = [] - self._base_data = [] - self._comparison_data = [] - - @staticmethod - def split_duration_and_num(time_value: str) -> tuple: - split_data = time_value.split("s") # time value example: 0.229s(1756) - duration, num = 0.0, None - if len(split_data) >= 2: - try: - num = int(split_data[1].strip("()")) - except ValueError: - pass - if len(split_data) >= 1: - try: - duration = float(split_data[0]) - except ValueError: - print(f"[WARNING] Invalid time value: {time_value}.") - return duration, num - - @staticmethod - def calculate_ratio(dividend, divisor): - if not divisor: - return float("inf") - return dividend / divisor - - def run(self): - if self.path_check(): - self.process() - self.output() - self.identify_bottleneck() - return self.output_format_data - - def path_check(self): - if self.base_collection_path: - if os.path.exists(self.base_collection_path): - self._has_base_collection = True - else: - print(f"[WARNING] Invalid path which not exists: {self.base_collection_path}.") - return os.path.exists(self.collection_path) - - def process(self): - base_collection_path = self.base_collection_path if self._has_base_collection else self.collection_path - result_data = ComparisonInterface(base_collection_path, self.collection_path).compare(Constant.OVERALL_COMPARE) - for data in result_data.values(): - self._headers = data.get("headers", []) - rows = data.get("rows", []) - if len(rows) == 2: - self._base_data = rows[0] - self._comparison_data = rows[1] - if not self._headers or not self._comparison_data: - return - self._is_minimal_profiling = 'E2E Time(Not minimal profiling)' not in self._headers - if self._has_base_collection: - self.cur_data["comparison_result"] = result_data - time_category_dict = {} - for time_category, time_list in self.performance_time_dict.items(): - time_value = self.get_time_value(time_category, self._comparison_data) - if time_value == Constant.INVALID_VALUE: - continue - duration, _ = self.split_duration_and_num(time_value) - time_category = time_category.split("(")[0] - time_category_dict[time_category] = duration - self.get_sub_category_time(time_category, time_list, duration) - self.cur_data["overall_data"] = time_category_dict - - def get_time_value(self, header_name: str, data_list: list): - try: - data_index = self._headers.index(header_name) - except ValueError: - return Constant.INVALID_VALUE - try: - time_value = data_list[data_index] - except IndexError: - return Constant.INVALID_VALUE - return time_value - - def get_sub_category_time(self, 
category: str, time_list: list, total_duration: float): - sub_time_dict = {} - for time_name in time_list: - time_value = self.get_time_value(time_name, self._comparison_data) - if time_value == Constant.INVALID_VALUE: - continue - sub_time_dict.setdefault(f"{category} Subtype", []).append(self.time_name_map.get(time_name, "")) - duration, num = self.split_duration_and_num(time_value) - sub_time_dict.setdefault(f"Duration(s)", []).append(duration) - sub_time_dict.setdefault(f"Duration Ratio", []).append( - "{:.2%}".format(self.calculate_ratio(duration, total_duration))) - sub_time_dict.setdefault(f"Kernel Number", []).append(num) - self.cur_data[self.time_name_map.get(category)] = sub_time_dict - - def identify_bottleneck(self): - overall_data = self.cur_data.get("overall_data") - if not overall_data: - return - e2e_time = sum([data for data in overall_data.values()]) - overall_bottleneck = f"The Model E2E Time is {e2e_time}s.\n" - comparison_bottleneck = "" - for time_type, time_value in overall_data.items(): - # add subtype time bottleneck - advice = self.advice_map.get(time_type, "") - self.cur_bottleneck[self.time_name_map.get(time_type)] = f"{time_type} is {time_value}s.\n{advice}" - # add overall bottleneck - overall_bottleneck += f" -- {time_type} is {time_value}s\n" - if time_type == "Free Time" and self._is_minimal_profiling and self.calculate_ratio(time_value, - e2e_time) > 0.1: - overall_bottleneck += "percentage of free time exceed the threshold 10%." - if not self._has_base_collection: - continue - # add comparison bottleneck - base_duration, _ = self.split_duration_and_num(self.get_time_value(time_type, self._base_data)) - if time_value > base_duration: - ratio = "{:.2%}".format(self.calculate_ratio(time_value - base_duration, base_duration)) - comparison_bottleneck += f"{time_type} exceeds the benchmark by {ratio}\n" - self.cur_bottleneck["overall_data"] = overall_bottleneck - self.cur_bottleneck["comparison_result"] = comparison_bottleneck - - def output(self): - self.output_format_data[self.DATA] = self.cur_data - self.output_format_data[self.BOTTLENECK] = self.cur_bottleneck - self.output_format_data[self.ADVICE] = self.cur_advices diff --git a/profiler/advisor/advisor_backend/prof_bean_advisor/__init__.py b/profiler/advisor/advisor_backend/prof_bean_advisor/__init__.py deleted file mode 100644 index 8400fd5ecd..0000000000 --- a/profiler/advisor/advisor_backend/prof_bean_advisor/__init__.py +++ /dev/null @@ -1,14 +0,0 @@ -# Copyright (c) 2023, Huawei Technologies Co., Ltd. -# All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. diff --git a/profiler/advisor/advisor_backend/prof_bean_advisor/cluster_step_trace_time_bean.py b/profiler/advisor/advisor_backend/prof_bean_advisor/cluster_step_trace_time_bean.py deleted file mode 100644 index b108fc77a3..0000000000 --- a/profiler/advisor/advisor_backend/prof_bean_advisor/cluster_step_trace_time_bean.py +++ /dev/null @@ -1,67 +0,0 @@ -# Copyright (c) 2023, Huawei Technologies Co., Ltd. -# All rights reserved. 
-# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - - -class ClusterStepTraceTimeBean: - STEP = "Step" - TYPE = "Type" - INDEX = "Index" - COMPUTING = "Computing" - COMMUNICATION = "Communication(Not Overlapped)" - FREE = "Free" - - def __init__(self, data: dict): - self._data = data - - @property - def step(self) -> str: - return self._data.get(self.STEP, '') - - @property - def type(self) -> str: - return self._data.get(self.TYPE, '') - - @property - def index(self) -> int: - try: - return int(self._data.get(self.INDEX)) - except ValueError as e: - msg = "[ERROR] Cluster step trace time.csv has invalid value in column 'Index'." - raise ValueError(msg) from e - - @property - def compute(self) -> float: - try: - return float(self._data.get(self.COMPUTING, '')) - except ValueError as e: - msg = "[ERROR] Cluster step trace time.csv has invalid value in column 'Computing'." - raise ValueError(msg) from e - - @property - def communication(self) -> float: - try: - return float(self._data.get(self.COMMUNICATION, '')) - except ValueError as e: - msg = "[ERROR] Cluster step trace time.csv has invalid value in column 'Communication'." - raise ValueError(msg) from e - - @property - def free(self) -> float: - try: - return float(self._data.get(self.FREE, '')) - except ValueError as e: - msg = "[ERROR] Cluster step trace time.csv has invalid value in column 'Free'." - raise ValueError(msg) from e - diff --git a/profiler/advisor/advisor_backend/timeline_advice/__init__.py b/profiler/advisor/advisor_backend/timeline_advice/__init__.py deleted file mode 100644 index 8400fd5ecd..0000000000 --- a/profiler/advisor/advisor_backend/timeline_advice/__init__.py +++ /dev/null @@ -1,14 +0,0 @@ -# Copyright (c) 2023, Huawei Technologies Co., Ltd. -# All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. diff --git a/profiler/advisor/advisor_backend/timeline_advice/op_schedule_advice.py b/profiler/advisor/advisor_backend/timeline_advice/op_schedule_advice.py deleted file mode 100644 index 9e492b2156..0000000000 --- a/profiler/advisor/advisor_backend/timeline_advice/op_schedule_advice.py +++ /dev/null @@ -1,89 +0,0 @@ -# Copyright (c) 2023, Huawei Technologies Co., Ltd. -# All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -from decimal import Decimal -from common_func_advisor.constant import Constant -from timeline_advice.timeline_advice_base import TimelineAdviceBase - - -class OpScheduleAdvice(TimelineAdviceBase): - def __init__(self, collection_path: str): - super().__init__(collection_path) - self.cur_data = list() - self.cur_bottleneck = str() - self.cur_advice = str() - - def run(self): - if not self.path_check(): - return self.output_format_data - self.preparse() - self.process() - self.output() - return self.output_format_data - - def process(self): - cpt_data = self.preparse_data[self.PREPARSE_TYPE.OVERLAP_CPT] - free_data = self.preparse_data[self.PREPARSE_TYPE.OVERLAP_FREE] - if not cpt_data or not free_data: - print("[ERROR] Fail to find Overlap data.") - return - - op_dur = [entry.get("dur", 0) for entry in cpt_data] - op_free = [0.0] * len(cpt_data) - merge_data = list() - merge_data.extend(cpt_data) - merge_data.extend(free_data) - merge_data.sort(key=lambda x : Decimal(x.get("ts"))) - idx = free_idx = 0 - while idx < len(merge_data) and free_idx < len(op_free): - entry = merge_data[idx] - entry_name = entry.get("name") - if entry_name == 'Free': - op_free[free_idx] = merge_data[idx].get('dur') - elif entry_name == 'Computing': - free_idx += 1 - idx += 1 - self.cur_data.append(op_dur) - self.cur_data.append(op_free) - free_ratio, cpt_ratio, _ = self.get_ratio() - if free_ratio < 0.2: - return - self.cur_bottleneck = f"NPU Utilication: {round(free_ratio * 100, 2)}%, " \ - f"NPU Free Utilization: {round(cpt_ratio * 100, 2)}%." - if len(self.preparse_data[self.PREPARSE_TYPE.SYNCHRONIZE]) > 1: - self.cur_advice = f"Device synchronize {len(self.preparse_data[self.PREPARSE_TYPE.SYNCHRONIZE])} times, " \ - "try to reduce synchronization statements to alleviate the bottleneck of operator delivery.\n" - small_op_num = self.small_op_block(op_free, op_dur) - small_op_ratio = small_op_num / len(op_dur) if op_dur else 0.0 - if small_op_ratio > Constant.SMALL_OP_NUM_RATIO: - self.cur_advice += "There are too many small operators, you can increase the batch size appropriately." 
- - def small_op_block(self, op_frees, op_durs): - small_op_num = 0 - for op_free, op_dur in zip(op_frees, op_durs): - if op_free > op_dur * Constant.SMALL_OP_DUR_RATIO: - small_op_num += 1 - return small_op_num - - def get_ratio(self): - cpt_data = self.preparse_data[self.PREPARSE_TYPE.OVERLAP_CPT] - free_data = self.preparse_data[self.PREPARSE_TYPE.OVERLAP_FREE] - cmu_data = self.preparse_data[self.PREPARSE_TYPE.OVERLAP_CMU] - cpt_time = sum([x.get("dur", 0) for x in cpt_data]) - free_time = sum([x.get("dur", 0) for x in free_data]) - cmu_time = sum([x.get("dur", 0) for x in cmu_data]) - total_time = cpt_time + free_time + cmu_time - if total_time > 0.0: - return (free_time / total_time, cpt_time / total_time, cmu_time / total_time) - return (0.0, 0.0, 0.0) diff --git a/profiler/advisor/advisor_backend/timeline_advice/optimizer_advice.py b/profiler/advisor/advisor_backend/timeline_advice/optimizer_advice.py deleted file mode 100644 index dee2e7ba56..0000000000 --- a/profiler/advisor/advisor_backend/timeline_advice/optimizer_advice.py +++ /dev/null @@ -1,55 +0,0 @@ -# Copyright (c) 2023, Huawei Technologies Co., Ltd. -# All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -from timeline_advice.timeline_advice_base import TimelineAdviceBase - - -class OptimizerAdvice(TimelineAdviceBase): - OPTIMIZER_MAP = { - "Optimizer.step#SGD.step": "torch_npu.optim.NpuFusedSGD", - "Optimizer.step#Adadelta.step": "torch_npu.optim.NpuFusedAdadelta", - "Optimizer.step#Lamb.step": "torch_npu.optim.NpuFusedLamb", - "Optimizer.step#Adam.step": "torch_npu.optim.NpuFusedAdam", - "Optimizer.step#AdamW.step": "torch_npu.optim.NpuFusedAdamW", - "Optimizer.step#AdamP.step": "torch_npu.optim.NpuFusedAdamP", - "Optimizer.step#BertAdam.step": "torch_npu.optim.NpuFusedBertAdam", - "Optimizer.step#RMSprop.step": "torch_npu.optim.NpuFusedRMSprop", - "Optimizer.step#RMSpropTF.step": "torch_npu.optim.NpuFusedRMSpropTF", - } - - def __init__(self, collection_path: str): - super().__init__(collection_path) - self.cur_data = list() - self.cur_bottleneck = str() - self.cur_advice = str() - - def run(self): - if not self.path_check(): - return self.output_format_data - self.preparse() - self.process() - self.output() - return self.output_format_data - - def process(self): - if not self.preparse_data[self.PREPARSE_TYPE.OPTIMIZER]: - return - - self.cur_data = list(set([entry.get("name", None) for entry in self.preparse_data[self.PREPARSE_TYPE.OPTIMIZER]])) - for index, opt_name in enumerate(self.cur_data): - self.cur_advice += f"You can choose {self.OPTIMIZER_MAP.get(opt_name)} to replace the current Optimizer: {opt_name}." 
- if index != len(self.cur_data) - 1: - self.cur_advice += "\n" - self.cur_bottleneck = self.cur_advice diff --git a/profiler/advisor/advisor_backend/timeline_advice/timeline_advice_base.py b/profiler/advisor/advisor_backend/timeline_advice/timeline_advice_base.py deleted file mode 100644 index 4c7ac96cd2..0000000000 --- a/profiler/advisor/advisor_backend/timeline_advice/timeline_advice_base.py +++ /dev/null @@ -1,99 +0,0 @@ -# Copyright (c) 2023, Huawei Technologies Co., Ltd. -# All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -from abc import abstractmethod -from collections import defaultdict -import json -import os - -from advice_base import AdviceBase -from common_func.file_manager import FileManager - - -class TimelineAdviceBase(AdviceBase): - class PREPARSE_TYPE: - OPTIMIZER = 0 - STEP = 1 - OVERLAP_CPT = 2 - OVERLAP_FREE = 3 - OVERLAP_CMU = 4 - ENQUEUE = 5 - DEQUEUE = 6 - HOST_TO_DEVICE = 7 - SYNCHRONIZE = 8 - - def __init__(self, collection_path: str): - super().__init__(collection_path) - self.trace_view_path = "" - self.has_preparse = False - self.preparse_data = defaultdict(list) - self.entry_map = { - 'Computing': self.PREPARSE_TYPE.OVERLAP_CPT, - 'Free': self.PREPARSE_TYPE.OVERLAP_FREE, - 'AscendCL@aclrtSynchronizeDevice': self.PREPARSE_TYPE.SYNCHRONIZE - } - - def path_check(self): - """ - check whether input path is valid - """ - if not os.path.exists(self.collection_path): - print("[ERROR] Path: {} is not exist.".format(self.collection_path)) - return False - if os.path.isdir(self.collection_path) and self.collection_path.endswith("ascend_pt"): - self.trace_view_path = os.path.join(self.collection_path, "ASCEND_PROFILER_OUTPUT", "trace_view.json") - if not os.path.exists(self.trace_view_path): - print("[ERROR] trace_view.json is not exist in the Path: {}.".format(os.path.join(self.collection_path, "ASCEND_PROFILER_OUTPUT"))) - return False - elif os.path.isfile(self.collection_path) and os.path.basename(self.collection_path) == "trace_view.json": - self.trace_view_path = self.collection_path - else: - print("[ERROR] Please input ascend_pt or trace_view.json.") - return False - print("[INFO] Start to analyse the target file: {}".format(self.trace_view_path)) - return True - - @abstractmethod - def run(self): - """ - analyze profiling data and advice - """ - - @abstractmethod - def output(self): - """ - output relevant data - """ - self.output_format_data[self.DATA] = self.cur_data - self.output_format_data[self.BOTTLENECK] = self.cur_bottleneck - self.output_format_data[self.ADVICE] = self.cur_advice - - def preparse(self): - if self.has_preparse: - return - json_reader = FileManager.read_json_file(self.trace_view_path) - if not isinstance(json_reader, list): - return - for entry in json_reader: - name = entry.get("name", None) - if not name: - continue - if name.startswith("Optimizer.step#") and name.endswith(".step"): - self.preparse_data[self.PREPARSE_TYPE.OPTIMIZER].append(entry) - elif name.startswith("ProfilerStep#"): - 
self.preparse_data[self.PREPARSE_TYPE.STEP].append(entry) - elif name in self.entry_map: - self.preparse_data[self.entry_map[name]].append(entry) - self.has_preparse = True diff --git a/profiler/advisor/advisor_backend/overall_advice/__init__.py b/profiler/advisor/analyzer/__init__.py similarity index 100% rename from profiler/advisor/advisor_backend/overall_advice/__init__.py rename to profiler/advisor/analyzer/__init__.py diff --git a/profiler/advisor/analyzer/base_analyzer.py b/profiler/advisor/analyzer/base_analyzer.py new file mode 100644 index 0000000000..f698865266 --- /dev/null +++ b/profiler/advisor/analyzer/base_analyzer.py @@ -0,0 +1,16 @@ +from abc import abstractmethod, ABCMeta + + +class BaseAnalyzer(metaclass=ABCMeta): + + @abstractmethod + def optimize(self): + pass + + @abstractmethod + def make_record(self): + pass + + @abstractmethod + def make_render(self): + pass diff --git a/profiler/advisor/analyzer/communication/__init__.py b/profiler/advisor/analyzer/communication/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/profiler/advisor/analyzer/communication/bandwidth/__init__.py b/profiler/advisor/analyzer/communication/bandwidth/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/profiler/advisor/analyzer/communication/environment/__init__.py b/profiler/advisor/analyzer/communication/environment/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/profiler/advisor/analyzer/computing/__init__.py b/profiler/advisor/analyzer/computing/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/profiler/advisor/analyzer/computing/aicpu/__init__.py b/profiler/advisor/analyzer/computing/aicpu/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/profiler/advisor/analyzer/computing/bound/__init__.py b/profiler/advisor/analyzer/computing/bound/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/profiler/advisor/analyzer/computing/op_compile/__init__.py b/profiler/advisor/analyzer/computing/op_compile/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/profiler/advisor/analyzer/dataloader/__init__.py b/profiler/advisor/analyzer/dataloader/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/profiler/advisor/analyzer/overall/__init__.py b/profiler/advisor/analyzer/overall/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/profiler/advisor/analyzer/scheduling/__init__.py b/profiler/advisor/analyzer/scheduling/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/profiler/advisor/analyzer/scheduling/free_event/__init__.py b/profiler/advisor/analyzer/scheduling/free_event/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/profiler/advisor/analyzer/scheduling/fusion_ops/__init__.py b/profiler/advisor/analyzer/scheduling/fusion_ops/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/profiler/advisor/cluster_perf_analysis.ipynb b/profiler/advisor/cluster_perf_analysis.ipynb deleted file mode 100644 index 39e389dd3a..0000000000 --- a/profiler/advisor/cluster_perf_analysis.ipynb +++ /dev/null @@ -1,625 +0,0 @@ -{ - "cells": [ - { - "cell_type": "code", - "execution_count": 4, - "id": "initial_id", - "metadata": { - "ExecuteTime": { - "end_time": "2023-11-21T13:31:25.022339600Z", - "start_time": "2023-11-21T13:31:25.016155200Z" - } - }, - "outputs": [], - "source": [ - "from advisor_backend.interface import Interface\n", - 
"import matplotlib.pyplot as plt\n", - "import numpy as np" - ] - }, - { - "cell_type": "markdown", - "id": "57d17a21205c3c5e", - "metadata": { - "collapsed": false - }, - "source": [ - "# 集群调优分析\n", - "## 1. 集群分析的数据准备\n", - "首先我们当前支持PyTorch多卡大模型的集群分析,您需要输入集群分析的profiling_path路径,例如: \n", - "--{profiling_path} \n", - " -- xxxx_ascend_pt \n", - " -- xxxx_ascend_pt \n", - " -- xxxx_ascend_pt \n", - " ...... \n", - " -- xxxx_ascend_pt \n", - "里面每张卡的profiling文件都是ascend_pt结尾的文件。 \n", - "\n", - "## 2. 集群分析解决的问题 \n", - "当前的功能主要有四项: \n", - "1). 识别多卡间的计算慢卡(根据计算时间等推断) \n", - "2). 识别多卡间的通信慢现象(根据通信链路的带宽判断) \n", - "3). 对多卡间的计算算子进行统计展示(识别不同卡的算子差异) \n", - "4). 展示集群流水并行图(根据时间轴展示多卡间的计算和通信时间) " - ] - }, - { - "cell_type": "code", - "execution_count": 5, - "id": "36b7a24cc7ca5da2", - "metadata": { - "ExecuteTime": { - "end_time": "2023-11-21T12:53:38.379699800Z", - "start_time": "2023-11-21T12:53:38.363755900Z" - }, - "collapsed": false - }, - "outputs": [], - "source": [ - "# EDIT THE PROFILING DATA PATH\n", - "cluster_path = \"YOUR PATH\"\n", - "interface = Interface(cluster_path)" - ] - }, - { - "cell_type": "markdown", - "id": "cf832ac2e0dfa30f", - "metadata": { - "collapsed": false - }, - "source": [ - "## 1) 识别慢卡" - ] - }, - { - "cell_type": "code", - "execution_count": 10, - "id": "40aac93278dd6e34", - "metadata": { - "ExecuteTime": { - "end_time": "2023-11-21T12:53:41.815599700Z", - "start_time": "2023-11-21T12:53:41.783393700Z" - }, - "collapsed": false - }, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "[INFO]Cluster has been analyzed because of the existence of cluster analysis output directory.\n", - "[INFO]Skip Cluster analyze backend.\n" - ] - } - ], - "source": [ - "dataset = interface.get_data('cluster', 'slow rank')\n" - ] - }, - { - "cell_type": "code", - "execution_count": 15, - "id": "cd3fceda-49f0-439f-9c54-cc31490fc99e", - "metadata": {}, - "outputs": [], - "source": [ - "# EDIT THE DATA TO SHOW WHAT YOU WANT\n", - "data = dataset.get('data')\n", - "words = dataset.get('bottleneck')\n", - "rank_ids = list(data.keys())\n", - "# 柱状图显示属性\n", - "compute_time = [data.get(key, {})[0] for key in rank_ids]\n", - "communication_time = [data.get(key, {})[1] for key in rank_ids]\n", - "free_time = [data.get(key, {})[2] for key in rank_ids]\n", - "# 柱宽\n", - "width = 0.2\n", - "\n" - ] - }, - { - "cell_type": "code", - "execution_count": 16, - "id": "6a1d82fb-a31b-49ab-a859-6d4bb898c512", - "metadata": { - "scrolled": true - }, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Communication has some issues in the cluster, because the max difference of Communication time has reached 88.476ms. \n", - "Free has some issues in the cluster, because the max difference of Free time has reached 29.224ms. 
\n" - ] - }, - { - "data": { - "image/png": "iVBORw0KGgoAAAANSUhEUgAAA2wAAAK9CAYAAABYee9vAAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjguMiwgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy8g+/7EAAAACXBIWXMAAA9hAAAPYQGoP6dpAABVtElEQVR4nO3deVhU9eLH8c+AsogCubApKqa5hVYuhG2aJJrXtNyzwqXsFphLLtcy18ykMjVN65ZbN8usNK+VRqRW7oqUmrmlaQYuEaCYQsz5/dF1fk6KgCLzRd6v55nn8ZzznXM+MxHjx+85Z2yWZVkCAAAAABjHzdUBAAAAAAAXR2EDAAAAAENR2AAAAADAUBQ2AAAAADAUhQ0AAAAADEVhAwAAAABDUdgAAAAAwFAUNgAAAAAwFIUNAAAAAAxFYQMAwHC9e/dWzZo1XR0DAOACFDYAwDVn+/bt6tKli2rUqCEvLy9VrVpV99xzj1577TWncS+88IKWLl3qkow2m61Aj9WrV7skHwDADDbLsixXhwAAoKisW7dOrVq1UvXq1RUTE6OgoCAdPnxYGzZs0P79+7Vv3z7H2PLly6tLly6aN29esef8z3/+47S8YMECJSQk6J133nFaf88996hixYqy2+3y9PQszogAAAOUcXUAAACK0sSJE+Xn56fNmzfL39/faduxY8dcE+oiHnroIaflDRs2KCEh4YL1AIDSjVMiAQDXlP3796thw4YXlDVJCggIcPzZZrMpKytL8+fPd5x+2Lt3b8f2I0eOqG/fvgoMDJSnp6caNmyoOXPmOO1v9erVstlsWrRokZ555hkFBQXJx8dH9913nw4fPlxkr+nv17AdPHhQNptNL7/8smbOnKlatWqpXLlyatOmjQ4fPizLsjRhwgRVq1ZN3t7e6tixo9LS0i7Y7+eff6477rhDPj4+qlChgtq3b6+dO3cWWW4AwJVjhg0AcE2pUaOG1q9frx07dujGG2/Mc9w777yjRx99VM2bN1f//v0lSddff70k6ejRo7r11ltls9kUFxenKlWq6PPPP1e/fv2UmZmpQYMGOe1r4sSJstlsGjFihI4dO6apU6cqKipKycnJ8vb2vmqv9d1331V2drYGDBigtLQ0xcfHq1u3brr77ru1evVqjRgxQvv27dNrr72moUOHOhXOd955RzExMYqOjtbkyZN1+vRpzZo1S7fffru2bdvGTU4AwBQWAADXkC+++MJyd3e33N3drcjISGv48OHWypUrrezs7AvG+vj4WDExMRes79evnxUcHGydOHHCaX2PHj0sPz8/6/Tp05ZlWdaqVassSVbVqlWtzMxMx7gPPvjAkmRNmzatwLljY2OtvD6WY2JirBo1ajiWDxw4YEmyqlSpYqWnpzvWjxw50pJkNW7c2MrJyXGs79mzp+Xh4WGdOXPGsizLOnnypOXv72899thjTsdJTU21/Pz8LlgPAHAdTokEAFxT7rnnHq1fv1733XefvvvuO8XHxys6OlpVq1bVsmXL8n2+ZVn66KOP1KFDB1mWpRMnTjge0dHRysjIUFJSktNzHnnkEVWoUMGx3KVLFwUHB+uzzz4r8td3vq5du8rPz8+xHBERIemv6+PKlCnjtD47O1tHjhyRJCUkJCg9PV09e/Z0en3u7u6KiIjQqlWrrmpuAEDBcUokAOCa06xZM3388cfKzs7Wd999pyVLlujVV19Vly5dlJycrAYNGuT53OPHjys9PV1vvvmm3nzzzYuO+fvNS+rUqeO0bLPZVLt2bR08ePCKX8ulVK9e3Wn5XHkLDQ296Prff/9dkrR3715J0t13333R/fr6+hZpTgDA5aOwAQCuWR4eHmrWrJmaNWumG264QX369NHixYs1ZsyYPJ9jt9sl/TVLFRMTc9ExjRo1uip5C8vd3b1Q663/fZPPudf4zjvvKCgo6IJx58/OAQBci9/IAIBSoWnTppKklJQUxzqbzXbBuCpVqqhChQrKzc1VVFRUgfZ9bsbqHMuytG/fPmOK3d+du7lKQEBAgV8jAMA1uIYNAHBNWbVqlWMm6XznrierW7euY52Pj4/S09Odxrm7u6tz58766KOPtGPHjgv2c/z48QvWLViwQCdPnnQsf/jhh0pJSVG7du0u92VcVdHR0fL19dULL7ygnJycC7Zf7DUCAFyDGTYAwDVlwIABOn36tO6//37Vq1dP2dnZWrdunRYtWqSaNWuqT58+jrFNmjTRl19+qSlTpigkJERhYWGKiIjQiy++qFWrVikiIkKPPfaYGjRooLS0NCUlJenLL7+84DvNKlasqNtvv119+vTR0aNHNXXqVNWuXVuPPfZYcb/8AvH19dWsWbP08MMP65ZbblGPHj1UpUoVHTp0SJ9++qluu+02zZgxw9UxAQCisAEArjEvv/yyFi9erM8++0xvvvmmsrOzVb16dT355JMaNWqU0xdqT5kyRf3799eoUaP0xx9/KCYmRhEREQoMDNSmTZs0fvx4ffzxx3r99ddVqVIlNWzYUJMnT77gmM8884y+//57TZo0SSdPnlTr1q31+uuvq1y5csX4ygvnwQcfVEhIiF588UW99NJLOnv2rKpWrao77rjDqdQCAFzLZl3svBEAAJCv1atXq1WrVlq8eLG6dOni6jgAgGsQ17ABAAAAgKEobAAAAABgKAobAAAAABiKa9gAAAAAwFDMsAEAAACAoShsAAAAAGAovoetGNntdv3666+qUKGCbDabq+MAAAAAcBHLsnTy5EmFhITIzS3veTQKWzH69ddfFRoa6uoYAAAAAAxx+PBhVatWLc/tFLZiVKFCBUl//Ufx9fV1cRoAAAAArpKZmanQ0FBHR8gLha0YnTsN0tfXl8IGAAAAIN9LpbjpCAAAAAAYisIGAAAAAIaisAEAAACAobiGzTCWZenPP/9Ubm6uq6OglChbtqzc3d1dHQMAAAAXQWEzSHZ2tlJSUnT69GlXR0EpYrPZVK1aNZUvX97VUQAAAPA3FDZD2O12HThwQO7u7goJCZGHhwdfro2rzrIsHT9+XL/88ovq1KnDTBsAAIBhKGyGyM7Olt1uV2hoqMqVK+fqOChFqlSpooMHDyonJ4fCBgAAYBhuOmIYNzf+k6B4MZMLAABgLtoBAAAAABiKwgYAAAAAhqKwAXmoWbOmpk6d6uoYAAAAKMW46Yjhav7r02I93sEX21/W81JTUzVx4kR9+umnOnLkiAICAnTTTTdp0KBBat26dRGnLFrz5s3ToEGDlJ6e7rR+8+bN8vHxcU0oAAAAQBQ2FIGDBw/qtttuk7+/v1566SWFh4crJydHK1euVGxsrH788UdXR7wsVapUcXUEAAAAlHKcEokr9uSTT8pms2nTpk3q3LmzbrjhBjVs2FBDhgzRhg0bJEmHDh1Sx44dVb58efn6+qpbt246evSoYx9jx47VTTfdpDlz5qh69eoqX768nnzySeXm5io+Pl5BQUEKCAjQxIkTnY5ts9k0a9YstWvXTt7e3q
pVq5Y+/PBDx/bVq1fLZrM5zZ4lJyfLZrPp4MGDWr16tfr06aOMjAzZbDbZbDaNHTtW0oWnRNpsNr311lu6//77Va5cOdWpU0fLli1zyrNs2TLVqVNHXl5eatWqlebPn3/B8QEAAICCorDhiqSlpWnFihWKjY296OmD/v7+stvt6tixo9LS0rRmzRolJCTop59+Uvfu3Z3G7t+/X59//rlWrFih9957T2+//bbat2+vX375RWvWrNHkyZM1atQobdy40el5zz33nDp37qzvvvtOvXr1Uo8ePbRr164C5W/RooWmTp0qX19fpaSkKCUlRUOHDs1z/Lhx49StWzd9//33uvfee9WrVy+lpaVJkg4cOKAuXbqoU6dO+u677/T444/r2WefLVAOAAAA4GIobLgi+/btk2VZqlevXp5jEhMTtX37di1cuFBNmjRRRESEFixYoDVr1mjz5s2OcXa7XXPmzFGDBg3UoUMHtWrVSrt379bUqVNVt25d9enTR3Xr1tWqVauc9t+1a1c9+uijuuGGGzRhwgQ1bdpUr732WoHye3h4yM/PTzabTUFBQQoKClL58uXzHN+7d2/17NlTtWvX1gsvvKBTp05p06ZNkqQ33nhDdevW1UsvvaS6deuqR48e6t27d4FyAAAAABdDYcMVsSwr3zG7du1SaGioQkNDHesaNGggf39/p5mwmjVrqkKFCo7lwMBANWjQwOnLxAMDA3Xs2DGn/UdGRl6wXNAZtsJq1KiR488+Pj7y9fV15Nm9e7eaNWvmNL558+ZXJQcAAABKBwobrkidOnVks9mK5MYiZcuWdVq22WwXXWe32wu8z3Nl7/ximZOTU6QZC5MHAAAAKAwKG65IxYoVFR0drZkzZyorK+uC7enp6apfv74OHz6sw4cPO9b/8MMPSk9PV4MGDa44w7kbm5y/XL9+fUn/f6fHlJQUx/bk5GSn8R4eHsrNzb3iHHXr1tWWLVuc1p1/yicAAABQWBQ2XLGZM2cqNzdXzZs310cffaS9e/dq165dmj59uiIjIxUVFaXw8HD16tVLSUlJ2rRpkx555BHdddddatq06RUff/HixZozZ4727NmjMWPGaNOmTYqLi5Mk1a5dW6GhoRo7dqz27t2rTz/9VK+88orT82vWrKlTp04pMTFRJ06c0OnTpy8rx+OPP64ff/xRI0aM0J49e/TBBx9o3rx5kv6aiQMAAAAKi+9hM9zlfpF1capVq5aSkpI0ceJEPf3000pJSVGVKlXUpEkTzZo1SzabTZ988okGDBigO++8U25ubmrbtm2BbwySn3Hjxun999/Xk08+qeDgYL333nuOmbuyZcvqvffe0xNPPKFGjRqpWbNmev7559W1a1fH81u0aKF//vOf6t69u3777TeNGTPGcWv/wggLC9OHH36op59+WtOmTVNkZKSeffZZPfHEE/L09CyS1woAAIDSxWYV5K4RKBKZmZny8/NTRkaGfH19nbadOXNGBw4cUFhYmLy8vFyUsOSx2WxasmSJOnXq5OooFzVx4kTNnj3b6XRQ0/CzBwAAUPwu1Q3OxwwbUIRef/11NWvWTJUqVdLatWv10ksvOU7PBAAAAAqLwgYUob179+r5559XWlqaqlevrqefflojR450dSwAAACUUBQ2lGimndH76quv6tVXX3V1DAAAAFwjuEskAAAAABiKGTYAAIBr0Vi/AozJuPo5AFwRZtgAAAAAwFAUNgAAAAAwFIUNAAAAAAxFYQMAAAAAQ3HTEdMV5ILhIj0eFx8XlM1m05IlS9SpU6erdox58+Zp0KBBSk9Pv2rHAAAAgLmYYUORSE1N1YABA1SrVi15enoqNDRUHTp0UGJioqujXTUpKSlq165dke2vZs2amjp1qtO67t27a8+ePUV2DAAAAJQszLDhih08eFC33Xab/P399dJLLyk8PFw5OTlauXKlYmNj9eOPP7o64lURFBR01Y/h7e0tb2/vq34cAAAAmIkZNlyxJ598UjabTZs2bVLnzp11ww03qGHDhhoyZIg2bNggSTp06JA6duyo8uXLy9fXV926ddPRo0cd+xg7dqxuuukmzZkzR9WrV1f58uX15JNPKjc3V/Hx8QoKClJAQIAmTpzodGybzaY33nhD//jHP1SuXDnVr19f69ev1759+9SyZUv5+PioRYsW2r9/v+M5vXv3vuA0xkGDBqlly5aO5ZYtW+qpp57S8OHDVbFiRQUFBWns2LEXHHvp0qWO5V9++UU9e/ZUxYoV5ePjo6ZNm2rjxo2SpP3796tjx44KDAxU+fLl1axZM3355ZdOx/v55581ePBg2Ww22Ww2SX+dEunv7+903FmzZun666+Xh4eH6tatq3feeeeCXG+99Zbuv/9+lStXTnXq1NGyZcvy/g8IAAAAY1HYcEXS0tK0YsUKxcbGysfH54Lt/v7+stvt6tixo9LS0rRmzRolJCTop59+Uvfu3Z3G7t+/X59//rlWrFih9957T2+//bbat2+vX375RWvWrNHkyZM1atQoRwk6Z8KECXrkkUeUnJysevXq6cEHH9Tjjz+ukSNHasuWLbIsS3FxcYV+bfPnz5ePj482btyo+Ph4jR8/XgkJCRcde+rUKd111106cuSIli1bpu+++07Dhw+X3W53bL/33nuVmJiobdu2qW3bturQoYMOHTokSfr4449VrVo1jR8/XikpKUpJSbnocZYsWaKBAwfq6aef1o4dO/T444+rT58+WrVqldO4cePGqVu3bvr+++917733qlevXkpLSyv0ewAAAADX4pRIXJF9+/bJsizVq1cvzzGJiYnavn27Dhw4oNDQUEnSggUL1LBhQ23evFnNmjWTJNntds2ZM0cVKlRQgwYN1KpVK+3evVufffaZ3NzcVLduXU2ePFmrVq1SRESEY/99+vRRt27dJEkjRoxQZGSknnvuOUVHR0uSBg4cqD59+hT6tTVq1EhjxoyRJNWpU0czZsxQYmKi7rnnngvGLly4UMePH9fmzZtVsWJFSVLt2rUd2xs3bqzGjRs7lidMmKAlS5Zo2bJliouLU8WKFeXu7q4KFSpc8lTLl19+Wb1799aTTz4pSY5ZzJdfflmtWrVyjOvdu7d69uwpSXrhhRc0ffp0bdq0SW3bti30+wAAAADXYYYNV8SyrHzH7Nq1S6GhoY6yJkkNGjSQv7+/du3a5VhXs2ZNVahQwbEcGBioBg0ayM3NzWndsWPHnPbfqFEjp+2SFB4e7rTuzJkzyszMLMQrc96vJAUHB19w7HOSk5N18803O8ra3506dUpDhw5V/fr15e/vr/Lly2vXrl2OGbaC2rVrl2677TandbfddpvT+/j37D4+PvL19c0zOwAAAMzFDBuuSJ06dWSz2YrkxiJly5Z1WrbZbBddd+40w4s979y1Xxdbd+55bm5uFxTNnJycAuX5+7HPye/GIEOHDlVCQoJefvll1a5dW97e3urSpYuys7Mv+bzLVZjsAAAAMBczbLgiFStWVHR0tGbOnKmsrKwLtqenp6t+/fo6fPiwDh8+7Fj/ww8/KD09X
Q0aNCjOuJKkKlWqXHCNWHJy8hXts1GjRkpOTs7zOrG1a9eqd+/euv/++xUeHq6goCAdPHjQaYyHh4dyc3MveZz69etr7dq1F+zbFe8jAAAArj4KG67YzJkzlZubq+bNm+ujjz7S3r17tWvXLk2fPl2RkZGKiopSeHi4evXqpaSkJG3atEmPPPKI7rrrLjVt2rTY8959993asmWLFixYoL1792rMmDHasWPHFe2zZ8+eCgoKUqdOnbR27Vr99NNP+uijj7R+/XpJf81Efvzxx0pOTtZ3332nBx988IIZr5o1a+rrr7/WkSNHdOLEiYseZ9iwYZo3b55mzZqlvXv3asqUKfr44481dOjQK8oPAAAAM3FKpOnGZrg6Qb5q1aqlpKQkTZw4UU8//bRSUlJUpUoVNWnSRLNmzZLNZtMnn3yiAQMG6M4775Sbm5vatm2r1157zSV5o6Oj9dxzz2n48OE6c+aM+vbtq0ceeUTbt2+/7H16eHjoiy++0NNPP617771Xf/75pxo0aKCZM2dKkqZMmaK+ffuqRYsWqly5skaMGHHBNXXjx4/X448/ruuvv15nz5696PWBnTp10rRp0/Tyyy9r4MCBCgsL09y5c52+kgAAAADXDptVkLtGoEhkZmbKz89PGRkZ8vX1ddp25swZHThwQGFhYfLy8nJRQpRG/OwBwDVqrF8Bxpj/D8PAtepS3eB8nBIJAAAAAIaisAEAAACAoShsAAAAAGAoChsAAAAAGIrCBgAAAACGorABAAAAgKEobAAAAABgKAobAAAAABiKwgYAAAAAhirj6gC4tPD54cV6vO0x24v1eAAAAADyxgwbrljv3r1ls9kueOzbt8/V0QAAAIASjRk2FIm2bdtq7ty5TuuqVKnitJydnS0PD4/ijAUAAACUaMywoUh4enoqKCjI6dG6dWvFxcVp0KBBqly5sqKjoyVJO3bsULt27VS+fHkFBgbq4Ycf1okTJxz7stvtmjRpksLCwuTt7a3GjRvrww8/dNVLAwAAAFyGwoarav78+fLw8NDatWs1e/Zspaen6+6779bNN9+sLVu2aMWKFTp69Ki6devmeM6kSZO0YMECzZ49Wzt37tTgwYP10EMPac2aNS58JQAAAEDx45RIFInly5erfPnyjuV27dpJkurUqaP4+HjH+ueff14333yzXnjhBce6OXPmKDQ0VHv27FGNGjX0wgsv6Msvv1RkZKQkqVatWvr222/1xhtv6K677iqmVwQAAAC4HoUNRaJVq1aaNWuWY9nHx0c9e/ZUkyZNnMZ99913WrVqlVO5O2f//v3KycnR6dOndc899zhty87O1s0333x1wgMAAACGorChSPj4+Kh27doXXX++U6dOqUOHDpo8efIFY4ODg7Vjxw5J0qeffqqqVas6bff09CzCxAAAAID5KGwoVrfccos++ugj1axZU2XKXPjj16BBA3l6eurQoUOc/ggAAIBSj5uOoFjFxsYqLS1NPXv21ObNm7V//36tXLlSffr0UW5uripUqKChQ4dq8ODBmj9/vvbv36+kpCS99tprmj9/vqvjAwAAAMWKGTbDbY/Z7uoIRSokJERr167ViBEj1KZNG509e1Y1atRQ27Zt5eb2178fTJgwQVWqVNGkSZP0008/yd/fX7fccoueeeYZF6cHAAAAipfNsizL1SFKi8zMTPn5+SkjI0O+vr5O286cOaMDBw4oLCxMXl5eLkqI0oifPQAogLF+BRiTcfVzFEZJzAyUIpfqBudjhg0AgJKqIH8hl/hLOYBrQyn9RwiuYQMAAAAAQ1HYAAAAAMBQFDYAAAAAMBSFzTDcAwbFjZ85AAAAc3HTEUOULVtWknT69Gl5e3u7OA1Kk+zsbEmSu7u7i5MAAIBiUUpv3lFSUdgM4e7uLn9/fx07dkySVK5cOdlsNhenwrXObrfr+PHjKleunMqU4dcBAACAafgbmkGCgoIkyVHagOLg5uam6tWr8w8EAAAABqKwGcRmsyk4OFgBAQHKyclxdRyUEh4eHnJz43JWAAAAE1HYDOTu7s71RAAAAAC4SyQAAAAAmIrCBgAAAACG4pRIAABQvLilOAAUGDNsAAAAAGAoChsAAAAAGIrCBgAAAACGorABAAAAgKG46QgAAABwubiJDq4yChsAAADMQPkBLsApkQAAAABgKAobAAAAABiKwgYAAAAAhqKwAQAAAIChKGwAAAAAYCgKGwAAAAAYisIGAAAAAIaisAEAAACAoShsAAAAAGAoChsAAAAAGIrCBgAAAACGorABAAAAgKEobAAAAABgKAobAAAAABiKwgYAAAAAhqKwAQAAAIChKGwAAAAAYCgKGwAAAAAYisIGAAAAAIZyaWGbNGmSmjVrpgoVKiggIECdOnXS7t27ncacOXNGsbGxqlSpksqXL6/OnTvr6NGjTmMOHTqk9u3bq1y5cgoICNCwYcP0559/Oo1ZvXq1brnlFnl6eqp27dqaN2/eBXlmzpypmjVrysvLSxEREdq0aVOhswAAAABAUXFpYVuzZo1iY2O1YcMGJSQkKCcnR23atFFWVpZjzODBg/Xf//5Xixcv1po1a/Trr7/qgQcecGzPzc1V+/btlZ2drXXr1mn+/PmaN2+eRo8e7Rhz4MABtW/fXq1atVJycrIGDRqkRx99VCtXrnSMWbRokYYMGaIxY8YoKSlJjRs3VnR0tI4dO1bgLAAAAABQlGyWZVmuDnHO8ePHFRAQoDVr1ujOO+9URkaGqlSpooULF6pLly6SpB9//FH169fX+vXrdeutt+rzzz/XP/7xD/36668KDAyUJM2ePVsjRozQ8ePH5eHhoREjRujTTz/Vjh07HMfq0aOH0tPTtWLFCklSRESEmjVrphkzZkiS7Ha7QkNDNWDAAP3rX/8qUJb8ZGZmys/PTxkZGfL19S3S9w4AUAqN9SvguIyrm6OwCpKbzFeOzMWDzMWnpObOQ0G7gVHXsGVk/PUGV6xYUZK0detW5eTkKCoqyjGmXr16ql69utavXy9JWr9+vcLDwx1lTZKio6OVmZmpnTt3Osacv49zY87tIzs7W1u3bnUa4+bmpqioKMeYgmT5u7NnzyozM9PpAQAAAAAFZUxhs9vtGjRokG677TbdeOONkqTU1FR5eHjI39/faWxgYKBSU1MdY84va+e2n9t2qTGZmZn6448/dOLECeXm5l50zPn7yC/L302aNEl+fn6OR2hoaAHfDQAAAAAwqLDFxsZqx44dev/9910dpciMHDlSGRkZjsfhw4ddHQkAAABACVLG1QEkKS4uTsuXL9fXX3+tatWqOdYHBQUpOztb6enpTjNbR48eVVBQkGPM3+/meO7OjeeP+fvdHI8ePSpfX195e3vL3d1d7u7uFx1z/j7yy/J3np6e8vT0LMQ7AQAAAAD/z6UzbJZlKS4uTkuWLNFXX32lsLAwp+1NmjRR2bJllZiY6Fi3e/duHTp0SJGRkZKkyMhIbd++3elujgkJCfL19VWDBg0cY87fx7kx5/bh4eGhJk2aOI2x2+1KTEx0jClIFgAAAAAo
Si6dYYuNjdXChQv1ySefqEKFCo5rwfz8/OTt7S0/Pz/169dPQ4YMUcWKFeXr66sBAwYoMjLScVfGNm3aqEGDBnr44YcVHx+v1NRUjRo1SrGxsY7ZrX/+85+aMWOGhg8frr59++qrr77SBx98oE8//dSRZciQIYqJiVHTpk3VvHlzTZ06VVlZWerTp48jU35ZAAAAAKAoubSwzZo1S5LUsmVLp/Vz585V7969JUmvvvqq3Nzc1LlzZ509e1bR0dF6/fXXHWPd3d21fPlyPfHEE4qMjJSPj49iYmI0fvx4x5iwsDB9+umnGjx4sKZNm6Zq1arprbfeUnR0tGNM9+7ddfz4cY0ePVqpqam66aabtGLFCqcbkeSXBQAAAACKkksLW0G+As7Ly0szZ87UzJkz8xxTo0YNffbZZ5fcT8uWLbVt27ZLjomLi1NcXNwVZQEAAACAomLMXSIBAAAAAM4obAAAAABgKAobAAAAABiKwgYAAAAAhqKwAQAAAIChKGwAAAAAYCgKGwAAAAAYisIGAAAAAIaisAEAAACAoShsAAAAAGAoChsAAAAAGIrCBgAAAACGorABAAAAgKEobAAAAABgKAobAAAAABiKwgYAAAAAhqKwAQAAAIChKGwAAAAAYCgKGwAAAAAYisIGAAAAAIaisAEAAACAoShsAAAAAGAoChsAAAAAGIrCBgAAAACGorABAAAAgKEobAAAAABgKAobAAAAABiKwgYAAAAAhqKwAQAAAIChKGwAAAAAYCgKGwAAAAAYisIGAAAAAIaisAEAAACAoShsAAAAAGAoChsAAAAAGIrCBgAAAACGorABAAAAgKEobAAAAABgKAobAAAAABiKwgYAAAAAhqKwAQAAAIChKGwAAAAAYCgKGwAAAAAYisIGAAAAAIaisAEAAACAoShsAAAAAGAoChsAAAAAGIrCBgAAAACGorABAAAAgKEobAAAAABgKAobAAAAABiKwgYAAAAAhqKwAQAAAIChKGwAAAAAYCgKGwAAAAAYisIGAAAAAIaisAEAAACAoShsAAAAAGAoChsAAAAAGIrCBgAAAACGorABAAAAgKEobAAAAABgKAobAAAAABiKwgYAAAAAhqKwAQAAAIChKGwAAAAAYCgKGwAAAAAYisIGAAAAAIaisAEAAACAoShsAAAAAGAoChsAAAAAGIrCBgAAAACGorABAAAAgKEobAAAAABgKAobAAAAABiKwgYAAAAAhqKwAQAAAIChKGwAAAAAYCgKGwAAAAAYisIGAAAAAIaisAEAAACAoShsAAAAAGAoChsAAAAAGIrCBgAAAACGorABAAAAgKEobAAAAABgKAobAAAAABiKwgYAAAAAhqKwAQAAAIChKGwAAAAAYCgKGwAAAAAYisIGAAAAAIaisAEAAACAocq4OgAAAEYY61eAMRlXPwcAAOdhhg0AAAAADEVhAwAAAABDUdgAAAAAwFAUNgAAAAAwFIUNAAAAAAxFYQMAAAAAQ1HYAAAAAMBQfA8bYIKCfP+TxHdAAQAAlDLMsAEAAACAoShsAAAAAGAoChsAAAAAGIrCBgAAAACG4qYjAC5fQW6Wwo1SAAAALhszbAAAAABgKAobAAAAABiKwgYAAAAAhqKwAQAAAIChKGwAAAAAYCgKGwAAAAAYisIGAAAAAIaisAEAAACAoShsAAAAAGAolxa2r7/+Wh06dFBISIhsNpuWLl3qtL13796y2WxOj7Zt2zqNSUtLU69eveTr6yt/f3/169dPp06dchrz/fff64477pCXl5dCQ0MVHx9/QZbFixerXr168vLyUnh4uD777DOn7ZZlafTo0QoODpa3t7eioqK0d+/eonkjAAAAAOAiyrjy4FlZWWrcuLH69u2rBx544KJj2rZtq7lz5zqWPT09nbb36tVLKSkpSkhIUE5Ojvr06aP+/ftr4cKFkqTMzEy1adNGUVFRmj17trZv366+ffvK399f/fv3lyStW7dOPXv21KRJk/SPf/xDCxcuVKdOnZSUlKQbb7xRkhQfH6/p06dr/vz5CgsL03PPPafo6Gj98MMP8vLyuhpvDy7XWL8CjMm4+jkAAACAK+TSwtauXTu1a9fukmM8PT0VFBR00W27du3SihUrtHnzZjVt2lSS9Nprr+nee+/Vyy+/rJCQEL377rvKzs7WnDlz5OHhoYYNGyo5OVlTpkxxFLZp06apbdu2GjZsmCRpwoQJSkhI0IwZMzR79mxZlqWpU6dq1KhR6tixoyRpwYIFCgwM1NKlS9WjR4+ieksAAAAAwMH4a9hWr16tgIAA1a1bV0888YR+++03x7b169fL39/fUdYkKSoqSm5ubtq4caNjzJ133ikPDw/HmOjoaO3evVu///67Y0xUVJTTcaOjo7V+/XpJ0oEDB5Samuo0xs/PTxEREY4xF3P27FllZmY6PQAAAACgoIwubG3bttWCBQuUmJioyZMna82aNWrXrp1yc3MlSampqQoICHB6TpkyZVSxYkWlpqY6xgQGBjqNObec35jzt5//vIuNuZhJkybJz8/P8QgNDS3U6wcAAABQurn0lMj8nH+qYXh4uBo1aqTrr79eq1evVuvWrV2YrGBGjhypIUOGOJYzMzMpbQAAAAAKzOgZtr+rVauWKleurH379kmSgoKCdOzYMacxf/75p9LS0hzXvQUFBeno0aNOY84t5zfm/O3nP+9iYy7G09NTvr6+Tg8AAAAAKKgSVdh++eUX/fbbbwoODpYkRUZGKj09XVu3bnWM+eqrr2S32xUREeEY8/XXXysnJ8cxJiEhQXXr1tV1113nGJOYmOh0rISEBEVGRkqSwsLCFBQU5DQmMzNTGzdudIwBAAAAgKLm0sJ26tQpJScnKzk5WdJfN/dITk7WoUOHdOrUKQ0bNkwbNmzQwYMHlZiYqI4dO6p27dqKjo6WJNWvX19t27bVY489pk2bNmnt2rWKi4tTjx49FBISIkl68MEH5eHhoX79+mnnzp1atGiRpk2b5nSq4sCBA7VixQq98sor+vHHHzV27Fht2bJFcXFxkiSbzaZBgwbp+eef17Jly7R9+3Y98sgjCgkJUadOnYr1PQMAAABQerj0GrYtW7aoVatWjuVzJSomJkazZs3S999/r/nz5ys9PV0hISFq06aNJkyY4PRdbO+++67i4uLUunVrubm5qXPnzpo+fbpju5+fn7744gvFxsaqSZMmqly5skaPHu24pb8ktWjRQgsXLtSoUaP0zDPPqE6dOlq6dKnjO9gkafjw4crKylL//v2Vnp6u22+/XStWrOA72AAAAABcNS4tbC1btpRlWXluX7lyZb77qFixouNLsvPSqFEjffPNN5cc07VrV3Xt2jXP7TabTePHj9f48ePzzQQAAAAARaFEXcMGAAAAAKWJ0bf1hwHG+hVgTMbVzwEAAACUQsywAQAAAIChmGEDULowawwAAEoQZtgAAAAAwFAUNgAAAAAwFIUNAAAAAAxFYQMAAAAAQ3HTEQBA0ePmLgAAFAlm2AAAAADAUMywAYDpmK0CAKDUYoYNAAA
AAAxFYQMAAAAAQ1HYAAAAAMBQFDYAAAAAMBSFDQAAAAAMRWEDAAAAAENR2AAAAADAUBQ2AAAAADAUhQ0AAAAADEVhAwAAAABDUdgAAAAAwFAUNgAAAAAwFIUNAAAAAAxFYQMAAAAAQ1HYAAAAAMBQFDYAAAAAMBSFDQAAAAAMRWEDAAAAAENR2AAAAADAUBQ2AAAAADAUhQ0AAAAADEVhAwAAAABDUdgAAAAAwFAUNgAAAAAwFIUNAAAAAAxFYQMAAAAAQ5Up7BMOHDigb775Rj///LNOnz6tKlWq6Oabb1ZkZKS8vLyuRkYAAAAAKJUKXNjeffddTZs2TVu2bFFgYKBCQkLk7e2ttLQ07d+/X15eXurVq5dGjBihGjVqXM3MAAAAAFAqFKiw3XzzzfLw8FDv3r310UcfKTQ01Gn72bNntX79er3//vtq2rSpXn/9dXXt2vWqBAYAAACA0qJAhe3FF19UdHR0nts9PT3VsmVLtWzZUhMnTtTBgweLKh8AAAAAlFoFKmyXKmt/V6lSJVWqVOmyAwEAAAAA/lLou0QmJSVp+/btjuVPPvlEnTp10jPPPKPs7OwiDQcAAAAApVmhC9vjjz+uPXv2SJJ++ukn9ejRQ+XKldPixYs1fPjwIg8IAAAAAKVVoQvbnj17dNNNN0mSFi9erDvvvFMLFy7UvHnz9NFHHxV1PgAAAAAotQpd2CzLkt1ulyR9+eWXuvfeeyVJoaGhOnHiRNGmAwAAAIBSrNCFrWnTpnr++ef1zjvvaM2aNWrfvr2kv75QOzAwsMgDAgAAAEBpVejCNnXqVCUlJSkuLk7PPvusateuLUn68MMP1aJFiyIPCAAAAAClVYFu63++Ro0aOd0l8pyXXnpJ7u7uRRIKAAAAAHAZhS0vXl5eRbUrAAAAAIAuo7C5ubnJZrPluT03N/eKAgEAAAAA/lLowrZkyRKn5ZycHG3btk3z58/XuHHjiiwYAAAAAJR2hS5sHTt2vGBdly5d1LBhQy1atEj9+vUrkmAAAAAAUNoV+i6Rebn11luVmJhYVLsDAAAAgFKvSArbH3/8oenTp6tq1apFsTsAAAAAgC7jlMjrrrvO6aYjlmXp5MmTKleunP7zn/8UaTgAAAAAKM0KXdimTp3qtOzm5qYqVaooIiJC1113XVHlAgAAAIBSr9CFLSYm5mrkAAAAAAD8TYGuYTt06FChdnrkyJHLCgMAAAAA+H8FKmzNmjXT448/rs2bN+c5JiMjQ//+979144036qOPPiqygAAAAABQWhXolMgffvhBEydO1D333CMvLy81adJEISEh8vLy0u+//64ffvhBO3fu1C233KL4+Hjde++9Vzs3AAAAAFzzCjTDVqlSJU2ZMkUpKSmaMWOG6tSpoxMnTmjv3r2SpF69emnr1q1av349ZQ0AAAAAikihbjri7e2tLl26qEuXLlcrDwAAAADgfy77i7P37dunlStX6o8//pD01/exAQAAAACKTqEL22+//abWrVvrhhtu0L333quUlBRJUr9+/fT0008XeUAAAAAAKK0KXdgGDx6ssmXL6tChQypXrpxjfffu3bVixYoiDQcAAAAApVmhvzj7iy++0MqVK1WtWjWn9XXq1NHPP/9cZMEAAAAAoLQr9AxbVlaW08zaOWlpafL09CySUAAAAACAyyhsd9xxhxYsWOBYttlsstvtio+PV6tWrYo0HAAAAACUZoU+JTI+Pl6tW7fWli1blJ2dreHDh2vnzp1KS0vT2rVrr0ZGAAAAACiVCj3DduONN2rPnj26/fbb1bFjR2VlZemBBx7Qtm3bdP3111+NjAAAAABQKhV6hk2S/Pz89OyzzxZ1FgAAAADAeS6rsJ05c0bff/+9jh07Jrvd7rTtvvvuK5JgAAAAAFDaFbqwrVixQo888ohOnDhxwTabzabc3NwiCQYAAAAApV2hr2EbMGCAunbtqpSUFNntdqcHZQ0AAAAAik6hC9vRo0c1ZMgQBQYGXo08AAAAAID/KXRh69Kli1avXn0VogAAAAAAzlfoa9hmzJihrl276ptvvlF4eLjKli3rtP2pp54qsnAAAAAAUJoVurC99957+uKLL+Tl5aXVq1fLZrM5ttlsNgobAAAAABSRQhe2Z599VuPGjdO//vUvubkV+oxKAAAAAEABFbpxZWdnq3v37pQ1AAAAALjKCt26YmJitGjRoquRBQAAAABwnkKfEpmbm6v4+HitXLlSjRo1uuCmI1OmTCmycAAAAABQmhW6sG3fvl0333yzJGnHjh1O286/AQkAAAAA4MoUurCtWrXqauQAAAAAAPwNdw4BAAAAAEMVaIbtgQce0Lx58+Tr66sHHnjgkmM//vjjIgkGAAAAAKVdgQqbn5+f4/o0Pz+/qxoIAAAAAPCXAhW2uXPnavz48Ro6dKjmzp17tTMBAAAAAFSIa9jGjRunU6dOXc0sAAAAAIDzFLiwWZZ1NXMAAAAAAP6mUHeJ5HvWAAAAAKD4FOp72G644YZ8S1taWtoVBQIAAAAA/KVQhW3cuHHcJRIAAAAAikmhCluPHj0UEBBwtbIAAAAAAM5T4GvYuH4NAAAAAIoXd4kEAAAAAEMV+JRIu91+NXMAAAAAAP6mULf1BwAAAAAUHwobAAAAABiKwgYAAAAAhqKwAQAAAIChKGwAAAAAYCgKGwAAAAAYisIGAAAAAIaisAEAAACAoShsAAAAAGAoChsAAAAAGIrCBgAAAACGorABAAAAgKEobAAAAABgKAobAAAAABiKwgYAAAAAhqKwAQAAAIChKGwAAAAAYCgKGwAAAAAYyqWF7euvv1aHDh0UEhIim82mpUuXOm23LEujR49WcHCwvL29FRUVpb179zqNSUtLU69eveTr6yt/f3/169dPp06dchrz/fff64477pCXl5dCQ0MVHx9/QZbFixerXr168vLyUnh4uD777LNCZwEAAACAouTSwpaVlaXGjRtr5syZF90eHx+v6dOna/bs2dq4caN8fHwUHR2tM2fOOMb06tVLO3fuVEJCgpYvX66vv/5a/fv3d2zPzMxUmzZtVKNGDW3dulUvvfSSxo4dqzfffNMxZt26derZs6f69eunbdu2qVOnTurUqZN27NhRqCwAAAAAUJTKuPLg7dq1U7t27S66zbIsTZ06VaNGjVLHjh0lSQsWLFBgYKCWLl2qHj16aNeuXVqxYoU2b96spk2bSpJee+013XvvvXr55ZcVEhKid999V9nZ2ZozZ448PDzUsGFDJScna8qUKY5iN23aNLVt21bDhg2TJE2YMEEJCQmaMWOGZs+eXaAsAAAAAFDUjL2G7cCBA0pNTVVUVJRjnZ+fnyIiIrR+/XpJ0vr16+Xv7+8oa5IUFRUlNzc3bdy40THmzjvvlIeHh2NMdHS0du/erd9//90x5vzjnBtz7jgFyXIxZ8+eVWZmptMDAAAAAArK2MKWmpoqSQoMDHRaHxgY6NiWmpqqgIAAp+1lypRRxYoVncZcbB/nHyOvMedvzy/LxUyaNEl+fn6OR2hoaD6vGgAAAAD+n7GF7V
owcuRIZWRkOB6HDx92dSQAAAAAJYixhS0oKEiSdPToUaf1R48edWwLCgrSsWPHnLb/+eefSktLcxpzsX2cf4y8xpy/Pb8sF+Pp6SlfX1+nBwAAAAAUlLGFLSwsTEFBQUpMTHSsy8zM1MaNGxUZGSlJioyMVHp6urZu3eoY89VXX8lutysiIsIx5uuvv1ZOTo5jTEJCgurWravrrrvOMeb845wbc+44BckCAAAAAEXNpYXt1KlTSk5OVnJysqS/bu6RnJysQ4cOyWazadCgQXr++ee1bNkybd++XY888ohCQkLUqVMnSVL9+vXVtm1bPfbYY9q0aZPWrl2ruLg49ejRQyEhIZKkBx98UB4eHurXr5927typRYsWadq0aRoyZIgjx8CBA7VixQq98sor+vHHHzV27Fht2bJFcXFxklSgLAAAAABQ1Fx6W/8tW7aoVatWjuVzJSomJkbz5s3T8OHDlZWVpf79+ys9PV233367VqxYIS8vL8dz3n33XcXFxal169Zyc3NT586dNX36dMd2Pz8/ffHFF4qNjVWTJk1UuXJljR492um72lq0aKGFCxdq1KhReuaZZ1SnTh0tXbpUN954o2NMQbIAAAAAQFFyaWFr2bKlLMvKc7vNZtP48eM1fvz4PMdUrFhRCxcuvORxGjVqpG+++eaSY7p27aquXbteURYAAAAAKErGXsMGAAAAAKUdhQ0AAAAADEVhAwAAAABDUdgAAAAAwFAUNgAAAAAwFIUNAAAAAAxFYQMAAAAAQ1HYAAAAAMBQFDYAAAAAMBSFDQAAAAAMRWEDAAAAAEOVcXUAAAAA4FoWPj883zHbY7YXQxKURMywAQAAAIChKGwAAAAAYChOiQQAAECJwemFKG2YYQMAAAAAQ1HYAAAAAMBQFDYAAAAAMBSFDQAAAAAMRWEDAAAAAENR2AAAAADAUNzWHwAAAICTgnx9gsRXKBQHZtgAAAAAwFAUNgAAAAAwFKdEAgAA4xTkdCxOxQJQGjDDBgAAAACGorABAAAAgKEobAAAAABgKAobAAAAABiKwgYAAAAAhqKwAQAAAIChKGwAAAAAYCgKGwAAAAAYisIGAAAAAIYq4+oAAAAA14Lw+eH5jtkes70YkgC4ljDDBgAAAACGYoYNAACglGJWEDAfM2wAAAAAYCgKGwAAAAAYilMiAQAAAFwTrsXTfJlhAwAAAABDUdgAAAAAwFAUNgAAAAAwFNewAQBgoJr/+jTfMQe9iiEIjMTPB/LCz8a1h8IGAACKTEn8y2JJzFwSlcT3uSRmLql4r/PGKZEAAAAAYCgKGwAAAAAYisIGAAAAAIaisAEAAACAoShsAAAAAGAo7hIJAMA1Lnx+eL5jtsdsL4YkAIDCYoYNAAAAAAxFYQMAAAAAQ3FKJHCV8UWQAAAAuFwUNgAXRdEEAABwPQpbKVYS/0JeEjOj+JTEnw8yF4+SmBkAAIlr2AAAAADAWBQ2AAAAADAUhQ0AAAAADEVhAwAAAABDUdgAAAAAwFAUNgAAAAAwFIUNAAAAAAxFYQMAAAAAQ/HF2QBwDQifH57vmO0x24shCQAAKErMsAEAAACAoShsAAAAAGAoChsAAAAAGIrCBgAAAACG4qYjuGLc7AAAAAC4OphhAwAAAABDMcMGAH/DrDEAADAFM2wAAAAAYCgKGwAAAAAYilMiUSpxyhuAy8HvDgBAcWOGDQAAAAAMRWEDAAAAAENR2AAAAADAUFzDBuCq4pofAACAy8cMGwAAAAAYihk2AIBLMPsKAED+mGEDAAAAAENR2AAAAADAUBQ2AAAAADAUhQ0AAAAADMVNR4AShJs0AAAAlC7MsAEAAACAoShsAAAAAGAoChsAAAAAGIrCBgAAAACGorABAAAAgKEobAAAAABgKAobAAAAABiKwgYAAAAAhqKwAQAAAIChKGwAAAAAYCgKGwAAAAAYisIGAAAAAIaisAEAAACAoShsAAAAAGAoChsAAAAAGIrCBgAAAACGorABAAAAgKEobAAAAABgKAobAAAAABiKwgYAAAAAhqKwAQAAAIChKGwAAAAAYCgKGwAAAAAYisIGAAAAAIaisAEAAACAoShsAAAAAGAoChsAAAAAGIrCBgAAAACGorABAAAAgKGMLmxjx46VzWZzetSrV8+x/cyZM4qNjVWlSpVUvnx5de7cWUePHnXax6FDh9S+fXuVK1dOAQEBGjZsmP7880+nMatXr9Ytt9wiT09P1a5dW/Pmzbsgy8yZM1WzZk15eXkpIiJCmzZtuiqvGQAAAADOMbqwSVLDhg2VkpLieHz77beObYMHD9Z///tfLV68WGvWrNGvv/6qBx54wLE9NzdX7du3V3Z2ttatW6f58+dr3rx5Gj16tGPMgQMH1L59e7Vq1UrJyckaNGiQHn30Ua1cudIxZtGiRRoyZIjGjBmjpKQkNW7cWNHR0Tp27FjxvAkAAAAASiXjC1uZMmUUFBTkeFSuXFmSlJGRobfffltTpkzR3XffrSZNmmju3Llat26dNmzYIEn64osv9MMPP+g///mPbrrpJrVr104TJkzQzJkzlZ2dLUmaPXu2wsLC9Morr6h+/fqKi4tTly5d9OqrrzoyTJkyRY899pj69OmjBg0aaPbs2SpXrpzmzJlT/G8IAAAAgFLD+MK2d+9ehYSEqFatWurVq5cOHTokSdq6datycnIUFRXlGFuvXj1Vr15d69evlyStX79e4eHhCgwMdIyJjo5WZmamdu7c6Rhz/j7OjTm3j+zsbG3dutVpjJubm6Kiohxj8nL27FllZmY6PQAAAACgoIwubBEREZo3b55WrFihWbNm6cCBA7rjjjt08uRJpaamysPDQ/7+/k7PCQwMVGpqqiQpNTXVqayd235u26XGZGZm6o8//tCJEyeUm5t70THn9pGXSZMmyc/Pz/EIDQ0t9HsAAAAAoPQq4+oAl9KuXTvHnxs1aqSIiAjVqFFDH3zwgby9vV2YrGBGjhypIUOGOJYzMzMpbQAAAAAKzOgZtr/z9/fXDTfcoH379ikoKEjZ2dlKT093GnP06FEFBQVJkoKCgi64a+S55fzG+Pr6ytvbW5UrV5a7u/tFx5zbR148PT3l6+vr9AAAAACAgipRhe3UqVPav3+/goOD1aRJE5UtW1aJiYmO7bt379ahQ4cUGRkpSYqMjNT27dud7uaYkJAgX19fNWjQwDHm/H2cG3NuHx4eHmrSpInTGLvdrsTERMcYAAAAALgajC5sQ4cO1Zo1a3Tw4EGtW7dO999/v9zd3dWzZ0/5+fmpX79+GjJkiFatWqWtW7eqT58+ioyM1K233ipJatOmjRo0aKCHH35Y3333nVauXKlRo0YpNjZWnp6ekqR//vOf+umnnzR8+HD9+OOPev311/XBBx9o8ODBjhxDhgzRv//9b82fP1+7du3SE088oaysLPXp08cl7wsAAACA0sHoa9h++eUX9ezZU7/99puqVKmi22+/XRs2bFCVKlUkSa+++qrc3NzUuXNnnT17VtHR0Xr99dcdz3d3d9fy5cv1xBNPKDIyUj4+P
oqJidH48eMdY8LCwvTpp59q8ODBmjZtmqpVq6a33npL0dHRjjHdu3fX8ePHNXr0aKWmpuqmm27SihUrLrgRCQAAAAAUJaML2/vvv3/J7V5eXpo5c6ZmzpyZ55gaNWros88+u+R+WrZsqW3btl1yTFxcnOLi4i45BgAAAACKktGnRAIAAABAaUZhAwAAAABDUdgAAAAAwFAUNgAAAAAwFIUNAAAAAAxFYQMAAAAAQ1HYAAAAAMBQFDYAAAAAMBSFDQAAAAAMRWEDAAAAAENR2AAAAADAUBQ2AAAAADAUhQ0AAAAADEVhAwAAAABDUdgAAAAAwFAUNgAAAAAwFIUNAAAAAAxFYQMAAAAAQ1HYAAAAAMBQFDYAAAAAMBSFDQAAAAAMRWEDAAAAAENR2AAAAADAUBQ2AAAAADAUhQ0AAAAADEVhAwAAAABDUdgAAAAAwFAUNgAAAAAwFIUNAAAAAAxFYQMAAAAAQ1HYAAAAAMBQFDYAAAAAMBSFDQAAAAAMRWEDAAAAAENR2AAAAADAUBQ2AAAAADAUhQ0AAAAADEVhAwAAAABDUdgAAAAAwFAUNgAAAAAwFIUNAAAAAAxFYQMAAAAAQ1HYAAAAAMBQFDYAAAAAMBSFDQAAAAAMRWEDAAAAAENR2AAAAADAUBQ2AAAAADAUhQ0AAAAADEVhAwAAAABDUdgAAAAAwFAUNgAAAAAwFIUNAAAAAAxFYQMAAAAAQ1HYAAAAAMBQFDYAAAAAMBSFDQAAAAAMRWEDAAAAAENR2AAAAADAUBQ2AAAAADAUhQ0AAAAADEVhAwAAAABDUdgAAAAAwFAUNgAAAAAwFIUNAAAAAAxFYQMAAAAAQ1HYAAAAAMBQFDYAAAAAMBSFDQAAAAAMRWEDAAAAAENR2AAAAADAUBQ2AAAAADAUhQ0AAAAADEVhAwAAAABDUdgAAAAAwFAUNgAAAAAwFIUNAAAAAAxFYQMAAAAAQ1HYAAAAAMBQFDYAAAAAMBSFDQAAAAAMRWEDAAAAAENR2AAAAADAUBQ2AAAAADAUhQ0AAAAADEVhAwAAAABDUdgAAAAAwFAUNgAAAAAwFIUNAAAAAAxFYQMAAAAAQ1HYAAAAAMBQFDYAAAAAMBSFDQAAAAAMRWEDAAAAAENR2AAAAADAUBQ2AAAAADAUhQ0AAAAADEVhAwAAAABDUdgAAAAAwFAUNgAAAAAwFIUNAAAAAAxFYQMAAAAAQ1HYAAAAAMBQFDYAAAAAMBSFDQAAAAAMRWEDAAAAAENR2AAAAADAUBQ2AAAAADAUhQ0AAAAADEVhAwAAAABDUdgAAAAAwFAUNgAAAAAwFIUNAAAAAAxFYQMAAAAAQ1HYAAAAAMBQFDYAAAAAMBSFrZBmzpypmjVrysvLSxEREdq0aZOrIwEAAAC4RlHYCmHRokUaMmSIxowZo6SkJDVu3FjR0dE6duyYq6MBAAAAuAZR2AphypQpeuyxx9SnTx81aNBAs2fPVrly5TRnzhxXRwMAAABwDSrj6gAlRXZ2trZu3aqRI0c61rm5uSkqKkrr16+/6HPOnj2rs2fPOpYzMjIkSZmZmVc3bAHZz57Od0ymzcp3TO4fufnvp4hec2nOLJXM3GS+NDLnsx8yX/pY/O4oUKb8kDmf/ZD50scqxZmlkpnblL+Ln8thWZd+XTYrvxGQJP3666+qWrWq1q1bp8jISMf64cOHa82aNdq4ceMFzxk7dqzGjRtXnDEBAAAAlCCHDx9WtWrV8tzODNtVNHLkSA0ZMsSxbLfblZaWpkqVKslms7kw2eXLzMxUaGioDh8+LF9fX1fHKRAyFw8yF4+SmFkqmbnJXDzIXDxKYmapZOYmc/EoiZn/zrIsnTx5UiEhIZccR2EroMqVK8vd3V1Hjx51Wn/06FEFBQVd9Dmenp7y9PR0Wufv73+1IhYrX1/fEvc/B5mLB5mLR0nMLJXM3GQuHmQuHiUxs1Qyc5O5eJTEzOfz8/PLdww3HSkgDw8PNWnSRImJiY51drtdiYmJTqdIAgAAAEBRYYatEIYMGaKYmBg1bdpUzZs319SpU5WVlaU+ffq4OhoAAACAaxCFrRC6d++u48ePa/To0UpNTdVNN92kFStWKDAw0NXRio2np6fGjBlzwameJiNz8SBz8SiJmaWSmZvMxYPMxaMkZpZKZm4yF4+SmPlycZdIAAAAADAU17ABAAAAgKEobAAAAABgKAobAAAAABiKwgYAAAAAhqKwocBmzpypmjVrysvLSxEREdq0aZOrI13S119/rQ4dOigkJEQ2m01Lly51daR8TZo0Sc2aNVOFChUUEBCgTp06affu3a6OdUmzZs1So0aNHF9cGRkZqc8//9zVsQrlxRdflM1m06BBg1wdJU9jx46VzWZzetSrV8/VsfJ15MgRPfTQQ6pUqZK8vb0VHh6uLVu2uDrWJdWsWfOC99pmsyk2NtbV0fKUm5ur5557TmFhYfL29tb111+vCRMmyPT7ip08eVKDBg1SjRo15O3trRYtWmjz5s2ujuWQ3+eIZVkaPXq0goOD5e3traioKO3du9c1Yf8nv8wff/yx2rRpo0qVKslmsyk5OdklOc93qcw5OTkaMWKEwsPD5ePjo5CQED3yyCP69ddfXRf4f/J7r8eOHat69erJx8dH1113naKiorRx40bXhP2fwvzd6J///KdsNpumTp1abPkuJr/MvXv3vuD3ddu2bV0T9iqhsKFAFi1apCFDhmjMmDFKSkpS48aNFR0drWPHjrk6Wp6ysrLUuHFjzZw509VRCmzNmjWKjY3Vhg0blJCQoJycHLVp00ZZWVmujpanatWq6cUXX9TWrVu1ZcsW3X333erYsaN27tzp6mgFsnnzZr3xxhtq1KiRq6Pkq2HDhkpJSXE8vv32W1dHuqTff/9dt912m8qWLavPP/9cP/zwg1555RVdd911ro52SZs3b3Z6nxMSEiRJXbt2dXGyvE2ePFmzZs3SjBkztGvXLk2ePFnx8fF67bXXXB3tkh599FElJCTonXfe0fbt29WmTRtFRUXpyJEjro4mKf/Pkfj4eE2fPl2zZ8/Wxo0b5ePjo+joaJ05c6aYk/6//DJnZWXp9ttv1+TJk4s5Wd4ulfn06dNKSkrSc889p6SkJH388cfavXu37rvvPhckdZbfe33DDTdoxowZ2r59u7799lvVrFlTbdq00fHjx4s56f8r6N+NlixZog0bNigkJKSYkuWtIJnbtm3r9Hv7vffeK8aExcACCqB58+ZWbGysYzk3N9cKCQmxJk2a5MJUBSfJWrJkiatjFNqxY8csSdaaNWtcHaVQrrvuOuutt95ydYx8nTx50qpTp46VkJBg3XXXXdbAgQNdHSlPY8aMsRo3buzqGIUyYsQI6/bbb3d1jCs2cOBA6/rrr7fsdruro+Spffv2Vt++fZ3WPfDAA1avXr1clCh/p0+fttzd3a3ly5c7rb/lllusZ5991kWp8vb3zxG73W4FBQVZL730kmNdenq65enpab333nsuSHihS332HThwwJJkbdu2rVgz5acgn9ebNm2yJFk/
//xz8YQqgILkzsjIsCRZX375ZfGEykdemX/55ReratWq1o4dO6waNWpYr776arFny8vFMsfExFgdO3Z0SZ7iwgwb8pWdna2tW7cqKirKsc7NzU1RUVFav369C5Nd+zIyMiRJFStWdHGSgsnNzdX777+vrKwsRUZGujpOvmJjY9W+fXunn22T7d27VyEhIapVq5Z69eqlQ4cOuTrSJS1btkxNmzZV165dFRAQoJtvvln//ve/XR2rULKzs/Wf//xHffv2lc1mc3WcPLVo0UKJiYnas2ePJOm7777Tt99+q3bt2rk4Wd7+/PNP5ebmysvLy2m9t7e38bPHknTgwAGlpqY6/f7w8/NTREQEn41XWUZGhmw2m/z9/V0dpcCys7P15ptvys/PT40bN3Z1nDzZ7XY9/PDDGjZsmBo2bOjqOAW2evVqBQQEqG7dunriiSf022+/uTpSkSrj6gAw34kTJ5Sbm6vAwECn9YGBgfrxxx9dlOraZ7fbNWjQIN1222268cYbXR3nkrZv367IyEidOXNG5cuX15IlS9SgQQNXx7qk999/X0lJSUZdL3MpERERmjdvnurWrauUlBSNGzdOd9xxh3bs2KEKFSq4Ot5F/fTTT5o1a5aGDBmiZ555Rps3b9ZTTz0lDw8PxcTEuDpegSxdulTp6enq3bu3q6Nc0r/+9S9lZmaqXr16cnd3V25uriZOnKhevXq5OlqeKlSooMjISE2YMEH169dXYGCg3nvvPa1fv161a9d2dbx8paamStJFPxvPbUPRO3PmjEaMGKGePXvK19fX1XHytXz5cvXo0UOnT59WcHCwEhISVLlyZVfHytPkyZNVpkwZPfXUU66OUmBt27bVAw88oLCwMO3fv1/PPPOM2rVrp/Xr18vd3d3V8YoEhQ0wVGxsrHbs2FEi/qW5bt26Sk5OVkZGhj788EPFxMRozZo1xpa2w4cPa+DAgUpISLjgX/dNdf5MSaNGjRQREaEaNWrogw8+UL9+/VyYLG92u11NmzbVCy+8IEm6+eabtWPHDs2ePbvEFLa3335b7dq1M+I6jkv54IMP9O6772rhwoVq2LChkpOTNWjQIIWEhBj9Xr/zzjvq27evqlatKnd3d91yyy3q2bOntm7d6upoMFBOTo66desmy7I0a9YsV8cpkFatWik5OVknTpzQv//9b3Xr1k0bN25UQECAq6NdYOvWrZo2bZqSkpKMPqPg73r06OH4c3h4uBo1aqTrr79eq1evVuvWrV2YrOhwSiTyVblyZbm7u+vo0aNO648ePaqgoCAXpbq2xcXFafny5Vq1apWqVavm6jj58vDwUO3atdWkSRNNmjRJjRs31rRp01wdK09bt27VsWPHdMstt6hMmTIqU6aM1qxZo+nTp6tMmTLKzc11dcR8+fv764YbbtC+fftcHSVPwcHBF5T2+vXrG38q5zk///yzvvzySz366KOujpKvYcOG6V//+pd69Oih8PBwPfzwwxo8eLAmTZrk6miXdP3112vNmjU6deqUDh8+rE2bNiknJ0e1atVydbR8nfv847OxeJwraz///LMSEhJKxOyaJPn4+Kh27dq69dZb9fbbb6tMmTJ6++23XR3ror755hsdO3ZM1atXd3w2/vzzz3r66adVs2ZNV8crsFq1aqly5cpGfz4WFoUN+fLw8FCTJk2UmJjoWGe325WYmFgirlMqSSzLUlxcnJYsWaKvvvpKYWFhro50Wex2u86ePevqGHlq3bq1tm/fruTkZMejadOm6tWrl5KTk0vEKRSnTp3S/v37FRwc7Oooebrtttsu+FqKPXv2qEaNGi5KVDhz585VQECA2rdv7+oo+Tp9+rTc3Jw/0t3d3WW3212UqHB8fHwUHBys33//XStXrlTHjh1dHSlfYWFhCgoKcvpszMzM1MaNG/lsLGLnytrevXv15ZdfqlKlSq6OdNlM/nx8+OGH9f333zt9NoaEhGjYsGFauXKlq+MV2C+//KLffvvN6M/HwuKUSBTIkCFDFBMTo6ZNm6p58+aaOnWqsrKy1KdPH1dHy9OpU6ec/nXlwIEDSk5OVsWKFVW9enUXJstbbGysFi5cqE8++UQVKlRwXAfh5+cnb29vF6e7uJEjR6pdu3aqXr26Tp48qYULF2r16tVG/3KvUKHCBdcF+vj4qFKlSsZeLzh06FB16NBBNWrU0K+//qoxY8bI3d1dPXv2dHW0PA0ePFgtWrTQCy+8oG7dumnTpk1688039eabb7o6Wr7sdrvmzp2rmJgYlSlj/kdlhw4dNHHiRFWvXl0NGzbUtm3bNGXKFPXt29fV0S5p5cqVsixLdevW1b59+zRs2DDVq1fPmM+W/D5HBg0apOeff1516tRRWFiYnnvuOYWEhKhTp07GZk5LS9OhQ4cc32N27h9VgoKCXDYzeKnMwcHB6tKli5KSkrR8+XLl5uY6PhsrVqwoDw8Pl2SWLp27UqVKmjhxou677z4FBwfrxIkTmjlzpo4cOeLSrwjJ7+fj72W4bNmyCgoKUt26dYs7qsOlMlesWFHjxo1T586dFRQUpP3792v48OGqXbu2oqOjXZa5yLn4LpUoQV577TWrevXqloeHh9W8eXNrw4YNro50SatWrbIkXfCIiYlxdbQ8XSyvJGvu3Lmujpanvn37WjVq1LA8PDysKlWqWK1bt7a++OILV8cqNNNv69+9e3crODjY8vDwsKpWrWp1797d2rdvn6tj5eu///2vdeONN1qenp5WvXr1rDfffNPVkQpk5cqVliRr9+7dro5SIJmZmdbAgQOt6tWrW15eXlatWrWsZ5991jp79qyro13SokWLrFq1alkeHh5WUFCQFRsba6Wnp7s6lkN+nyN2u9167rnnrMDAQMvT09Nq3bq1y39m8ss8d+7ci24fM2aMkZnPff3AxR6rVq1yWeb8cv/xxx/W/fffb4WEhFgeHh5WcHCwdd9991mbNm0yNvPFmHBb/0tlPn36tNWmTRurSpUqVtmyZa0aNWpYjz32mJWamurSzEXNZlmWVaQNEAAAAABQJLiGDQAAAAAMRWEDAAAAAENR2AAAAADAUBQ2AAAAADAUhQ0AAAAADEVhAwAAAABDUdgAAAAAwFAUNgAAAAAwFIUNAAAXsNlsWrp0qatjAAAMR2EDAOB/evfuLZvNJpvNprJlyyosLEzDhw/XmTNnXB1N8+bNk7+/v9Pyuazu7u667rrrFBERofHjxysjI8N1QQEARYrCBgDAedq2bauUlBT99NNPevXVV/XGG29ozJgxro51Ub6+vkpJSdEvv/yidevWqX///lqwYIFuuukm/frrr66OBwAoAhQ2AADO4+npqaCgIIWGhqpTp06KiopSQkKCY/tvv/2mnj17qmrVqipXrpzCw8P13nvvOe2jZcuWeuqppzR8+HBVrFhRQUFBGjt27CWPO2bMGAUHB+v7778vcFabzaagoCAFBwerfv366tevn9atW6dTp05p+PDhhXrdAAAzUdgAAMjDjh07tG7dOnl4eDjWnTlzRk2aNNGnn36qHTt2qH///nr44Ye
1adMmp+fOnz9fPj4+2rhxo+Lj4zV+/Hin4neOZVkaMGCAFixYoG+++UaNGjW6oswBAQHq1auXli1bptzc3CvaFwDA9cq4OgAAACZZvny5ypcvrz///FNnz56Vm5ubZsyY4dhetWpVDR061LE8YMAArVy5Uh988IGaN2/uWN+oUSPHqZR16tTRjBkzlJiYqHvuuccx5s8//9RDDz2kbdu26dtvv1XVqlWL5DXUq1dPJ0+e1G+//aaAgIAi2ScAwDUobAAAnKdVq1aaNWuWsrKy9Oqrr6pMmTLq3LmzY3tubq5eeOEFffDBBzpy5Iiys7N19uxZlStXzmk/f58pCw4O1rFjx5zWDR48WJ6entqwYYMqV65cZK/BsixJf50yCQAo2TglEgCA8/j4+Kh27dpq3Lix5syZo40bN+rtt992bH/ppZc0bdo0jRgxQqtWrVJycrKio6OVnZ3ttJ+yZcs6LdtsNtntdqd199xzj44cOaKVK1cW6WvYtWuXfH19ValSpSLdLwCg+FHYAADIg5ubm5555hmNGjVKf/zxhyRp7dq16tixox566CE1btxYtWrV0p49ey5r//fdd58WLlyoRx99VO+//36RZD527JgWLlyoTp06yc2Nj3kAKOn4TQ4AwCV07dpV7u7umjlzpqS/rkdLSEjQunXrtGvXLj3++OM6evToZe///vvv1zvvvKM+ffroww8/LNRzLctSamqqUlJStGvXLs2ZM0ctWrSQn5+fXnzxxcvOBAAwB9ewAQBwCWXKlFFcXJzi4+P1xBNPaNSoUfrpp58UHR2tcuXKqX///urUqdMVfVl1ly5dZLfb9fDDD8vNzU0PPPBAgZ6XmZmp4OBg2Ww2+fr6qm7duoqJidHAgQPl6+t72XkAAOawWeeuTAYAAAAAGIVTIgEAAADAUBQ2AAAAADAUhQ0AAAAADEVhAwAAAABDUdgAAAAAwFAUNgAAAAAwFIUNAAAAAAxFYQMAAAAAQ1HYAAAAAMBQFDYAAAAAMBSFDQAAAAAM9X9lkhU2o6/UQwAAAABJRU5ErkJggg==", - "text/plain": [ - "
" - ] - }, - "metadata": {}, - "output_type": "display_data" - } - ], - "source": [ - "# 设置展示图大小\n", - "fig, ax = plt.subplots(figsize=(10,8))\n", - "\n", - "x = np.arange(len(rank_ids)) # the label locations\n", - "\n", - "rects1 = ax.bar(x - width, compute_time, width, label='Computing')\n", - "rects2 = ax.bar(x, communication_time, width, label='Communication')\n", - "rects3 = ax.bar(x + width, free_time, width, label='Free')\n", - "\n", - "\n", - "# Add some text for labels, title and custom x-axis tick labels, etc.\n", - "ax.set_ylabel('Time(us)')\n", - "ax.set_xlabel('Rank ID')\n", - "ax.set_title('Step Time')\n", - "ax.set_xticks(x)\n", - "ax.set_xticklabels(rank_ids)\n", - "ax.legend()\n", - "print(words)" - ] - }, - { - "cell_type": "markdown", - "id": "3511befaff513e8e", - "metadata": { - "collapsed": false - }, - "source": [ - "## 2)识别通信链路慢" - ] - }, - { - "cell_type": "code", - "execution_count": 6, - "id": "2a1e617d2a117125", - "metadata": { - "collapsed": false - }, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "[INFO]Cluster has been analyzed because of the existence of cluster analysis output directory.\n", - "[INFO]Skip Cluster analyze backend.\n" - ] - } - ], - "source": [ - "dataset = interface.get_data('cluster', 'slow link')" - ] - }, - { - "cell_type": "code", - "execution_count": 14, - "id": "c8bca314-a8da-4a5b-985a-c36f00154552", - "metadata": {}, - "outputs": [], - "source": [ - "# EDIT THE DATA TO SHOW WHAT YOU WANT\n", - "data = dataset.get('data')\n", - "words = dataset.get('bottleneck')\n", - "rank_ids = list(data.keys())\n", - "# 柱状图显示属性\n", - "sdma_bw = [data.get(key, {}).get(\"SDMA bandwidth(GB/s)\") for key in rank_ids]\n", - "rdma_bw = [data.get(key, {}).get(\"RDMA bandwidth(GB/s)\") for key in rank_ids]\n", - "# 柱宽\n", - "width = 0.4" - ] - }, - { - "cell_type": "code", - "execution_count": 15, - "id": "99ef04c9-ec07-4790-bbb6-0de9bf6c99d0", - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "RDMA bandwidth(GB/s): \n", - "The average is 0.041, while the maximum is 0.041GB/s and the minimum is 0.041GB/s. the difference is 0.0GB/s. \n", - "SDMA bandwidth(GB/s): \n", - "The average is 0.054, while the maximum is 0.056GB/s and the minimum is 0.052GB/s. the difference is 0.003GB/s. 
\n" - ] - }, - { - "data": { - "image/png": "iVBORw0KGgoAAAANSUhEUgAAAkAAAAHHCAYAAABXx+fLAAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjguMiwgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy8g+/7EAAAACXBIWXMAAA9hAAAPYQGoP6dpAABMFklEQVR4nO3df3zO9f7H8ee1zTYbmzG2EebXMT9mQkR+1LGMo1gyWmHmR6ei1ApxMHJqpS9RnKRjqJNIySmJM8s6ZMhmRSGVzK8xi43Nz+3z/aOb63S1YT+uaxufx/12u27t+nze1/v1+lzs8uzz67IYhmEIAADARJwqugEAAIDyRgACAACmQwACAACmQwACAACmQwACAACmQwACAACmQwACAACmQwACAACmQwACAACmQwACgAqWlJQki8WipKSkMs9lsVg0ffr0G46bPn26LBZLsea8OvbUqVNl7A6oPAhAQCVmsViK9bDHP5w3q5deeklr1qwp1thffvml0Hvn5eWltm3bav78+crPz3dss5VcSd5L4GbnUtENALi2d9991+b5O++8o4SEhELLW7RoUZ5tVSovvfSSBg4cqPDw8GK/JjIyUn/5y18kSdnZ2Vq3bp2efPJJHTp0SK+++qqDOi0f58+fl4tL6T7aS/NeAjcrAhBQiQ0ZMsTm+bZt25SQkFBo+R/l5eXJw8PDka1VKMMwdOHCBVWtWrVUr2/Xrp3Ne/jEE0+oU6dOWr58+U0fgNzd3Su6BeCmwCEw4CZ39913q3Xr1kpJSVH37t3l4eGhyZMnS5L+/e9/q2/fvqpbt67c3NzUpEkTzZw5s9ChnqtzfP/997rnnnvk4eGhevXqadasWYXqvfHGG2rVqpU8PDzk4+OjDh06aPny5db1V88X2bdvnwYNGiQvLy/VqlVL48aN04ULF2zmunLlimbOnKkmTZrIzc1NgYGBmjx5si5evGgzLjAwUPfdd582bNigDh06qGrVqnrrrbdksViUm5urZcuWWQ9pDR8+vMTvocVikZ+fX6E9J454/44cOaLw8HB5enqqTp06euaZZwpt7+uvvy5nZ2edOXPGumz27NmyWCyKiYmxLsvPz1f16tU1ceJEm2354zlAW7Zs0R133CF3d3c1adJEb731VpHvwY3eyzNnzmj48OGqUaOGvL29FR0drby8vCLfU6CyYw8QcAvIyspSnz599NBDD2nIkCHy8/OTJC1dulTVqlVTTEyMqlWrpi+++ELTpk1TTk5OoT0dp0+fVu/evTVgwAANGjRIH374oSZOnKjg4GD16dNHkvT222/rqaee0sCBA62B5ttvv9X27dv18MMP28w3aNAgBQYGKi4uTtu2bdPrr7+u06dP65133rGOGTVqlJYtW6aBAwfq2Wef1fbt2xUXF6e9e/fq448/tplv//79ioyM1F//+leNHj1azZs317vvvqtRo0apY8eOevTRRyVJTZo0ueH7lZeXZz2hNycnR59//rnWr1+vSZMm2Yyz9/t3/vx59ezZU+np6XrqqadUt25dvfvuu/riiy9s5urWrZsKCgq0ZcsW3XfffZKkzZs3y8nJSZs3b7aO27Vrl86dO6fu3btfc1t3796tXr16qXbt2po+fbquXLmi2NhY69+Rq4rzXg4aNEiNGjVSXFycUlNT9c9//lN16tTRK6+8csP3HKh0DAA3jTFjxhh//LXt0aOHIclYuHBhofF5eXmFlv31r381PDw8jAsXLhSa45133rEuu3jxouHv7288+OCD1mX9+/c3WrVqdd0eY2NjDUlGv379bJY/8cQThiTjm2++MQzDMNLS0gxJxqhRo2zGPffcc4Yk44svvrAua9iwoSHJWL9+faF6np6eRlRU1HV7uurgwYOGpCIfjz/+uFFQUGAz3t7v39y5cw1JxgcffGBdlpubazRt2tSQZGzatMkwDMPIz883vLy8jAkTJhiGYRgFBQVGrVq1jIiICMPZ2dk4e/asYRiGMWfOHMPJyck4ffq0dT5JRmxsrPV5eHi44e7ubhw6dMi67PvvvzecnZ0L/V261nt59c90xIgRNssfeOABo1atWoXGAzcDDoEBtwA3NzdFR0cXWv77c2TOnj2rU6dOqVu3bsrLy9O+fftsxlarVs3mvBhXV1d17NhRP//8s3VZjRo1dOTIEX399dc37GnMmDE2z5988klJ0rp162z++/tDOpL07LPPSpI+++wzm+WNGjVSWFjYDesWx6OPPqqEhAQlJCToo48+0pgxY/TWW28V6sXe79+6desUEBCggQMHWpd5eHhY97hc5eTkpC5duui///2vJGnv3r3KysrS888/L8MwlJycLOm3vUKtW7dWjRo1itzO/Px8bdiwQeHh4WrQoIF1eYsWLUr1Xj722GM2z7t166asrCzl5OSUeC6gohGAgFtAvXr15OrqWmj5d999pwceeEDe3t7y8vJS7dq1rf9IZ2dn24y97bbbCt0XxsfHR6dPn7Y+nzhxoqpVq6aOHTuqWbNmGjNmjL766qsie2rWrJnN8yZNmsjJyUm//PKLJOnQoUNycnJS06ZNbcb5+/urRo0aOnTokM3yRo0aXecdKJlmzZopNDRUoaGhGjBggObPn68nnnhCc+fO1e7du63j7P3+HTp0SE2bNi00rnnz5oV67Natm1JSUnT+/Hlt3rxZAQEBateunUJCQqyHwbZs2aJu3bpdczszMzN1/vz5Qn8W16p5I78PUdJv2yfJZhuBmwUBCLgFFHU11JkzZ9SjRw998803euGFF/Tpp58qISHBer5GQUGBzXhnZ+ci5zYMw/pzixYttH//fq1YsUJdu3bVRx99pK5duyo2NvaGPV7rpnvFvRlfaa/4Kq6ePXtKknWviyPev5Lo2rWrLl++rOTkZG3evNkadLp166bNmzdr3759yszMvG4Asjd7byNQkTgJGrhFJSUlKSsrS6tXr7Y5SfbgwYNlmtfT01ODBw/W4MGDdenSJQ0YMEAvvviiJk2aZHMJ9oEDB2z22vz4448qKChQYGCgJKlhw4YqKCjQgQMHbO5jdOLECZ05c0YNGzYsVj/FDVA3cuXKFUnSuXPnJDnm/WvYsKH27NkjwzBs+t6/f3+hsR07dpSrq6s2b96szZs3a/z48ZKk7t276+2331ZiYqL1+bXUrl1bVatW1YEDBwqtK6qmvd5L4GbAHiDgFnX1/9Z//3/nly5d0j/+8Y9Sz5mVlWXz3NXVVS1btpRhGLp8+bLNugULFtg8f+ONNyTJekXU1RsRzp0712bcnDlzJEl9+/YtVk+enp42l4uX1qeffipJCgkJkeSY9+8vf/mLjh07pg8//NC6LC8vT4sWLSo01t3dXXfccYfef/99paen2+wBOn/+vF5//XU1adJEAQEB16zn7OyssLAwrVmzRunp6dble/fu1YYNGwqNt9d7CdwM2AME3KK6dOkiHx8fRUVF6amnnpLFYtG7775bpsMVvXr1kr+/v+666y75+flp7969mj9/vv
r27avq1avbjD148KD69eun3r17Kzk5Wf/617/08MMPWwNGSEiIoqKitGjRIuvhph07dmjZsmUKDw/XPffcU6ye2rdvr40bN2rOnDmqW7euGjVqpE6dOl33NampqfrXv/4l6beTmxMTE/XRRx+pS5cu6tWrlyTHvH+jR4/W/PnzNWzYMKWkpCggIEDvvvvuNW9a2a1bN7388svy9vZWcHCwJKlOnTpq3ry59u/fX6x7Hs2YMUPr169Xt27d9MQTT+jKlSvWezl9++23NmNL814CN62KuvwMQMld6zL4a12a/tVXXxl33nmnUbVqVaNu3brGhAkTjA0bNthccn29OaKiooyGDRtan7/11ltG9+7djVq1ahlubm5GkyZNjPHjxxvZ2dnWMVcvmf7++++NgQMHGtWrVzd8fHyMsWPHGufPn7eZ//Lly8aMGTOMRo0aGVWqVDHq169vTJo0yeYSc8P47TL4vn37FrmN+/btM7p3725UrVrVkHTdS+KLugzexcXFaNy4sTF+/Hjr5eWOev8MwzAOHTpk9OvXz/Dw8DB8fX2NcePGGevXry80p2EYxmeffWZIMvr06WOzfNSoUYYkY/HixYVq6g+XwRuGYXz55ZdG+/btDVdXV6Nx48bGwoULrX9Ov3et9/Lq2MzMTJvxS5YsMSQZBw8eLNQHUNlZDIOz1wDYz/Tp0zVjxgxlZmbK19e3otsBgCJxDhAAADAdAhAAADAdAhAAADAdzgECAACmwx4gAABgOgQgAABgOtwIsQgFBQU6duyYqlevzq3hAQC4SRiGobNnz6pu3bpycrr+Ph4CUBGOHTum+vXrV3QbAACgFA4fPqzbbrvtumMIQEW4ekv/w4cPy8vLq4K7AQAAxZGTk6P69esX+mqeohCAinD1sJeXlxcBCACAm0xxTl/hJGgAAGA6BCAAAGA6BCAAAGA6nAMEAICd5Ofn6/LlyxXdxi2rSpUqcnZ2tstcBCAAAMrIMAxlZGTozJkzFd3KLa9GjRry9/cv8336CEAAAJTR1fBTp04deXh4cBNdBzAMQ3l5eTp58qQkKSAgoEzzEYAAACiD/Px8a/ipVatWRbdzS6tataok6eTJk6pTp06ZDodxEjQAAGVw9ZwfDw+PCu7EHK6+z2U914oABACAHXDYq3zY630mAAEAANMhAAEAANPhJGgAABwk8PnPyrXeLy/3LfFrMjMzNW3aNH322Wc6ceKEfHx8FBISomnTpumuu+5SYGCgDh06JElyd3eXn5+fOnbsqMcee0x//vOf/1f7l1/UqFEjOTk5KT09XfXq1bOuO378uOrXr6/8/HwdPHhQgYGBNj2EhYVp48aN2rZtm+64447SbXwJsQcIAAATe/DBB7Vr1y4tW7ZMP/zwgz755BPdfffdysrKso554YUXdPz4ce3fv1/vvPOOatSoodDQUL344ouF5qtXr57eeecdm2XLli2zCUS/l56erq1bt2rs2LGKj4+378ZdB3uAAAAwqTNnzmjz5s1KSkpSjx49JEkNGzZUx44dbcZVr15d/v7+kqQGDRqoe/fuCggI0LRp0zRw4EA1b97cOjYqKkpLlizRpEmTrMuWLFmiqKgozZw5s1APS5Ys0X333afHH39cd955p+bMmWO93N2R2AMEAIBJVatWTdWqVdOaNWt08eLFEr123LhxMgxD//73v22W9+vXT6dPn9aWLVskSVu2bNHp06d1//33F5rDMAwtWbJEQ4YMUVBQkJo2baoPP/yw9BtUAuwBAoByVNJzQkpzTgfM69sjZ6w/t7mtxg3Hu7i4aOnSpRo9erQWLlyodu3aqUePHnrooYfUpk2b6762Zs2aqlOnjn755Reb5VWqVNGQIUMUHx+vrl27Kj4+XkOGDFGVKlUKzbFx40bl5eUpLCxMkjRkyBAtXrxYQ4cOvWHvZcUeIAAATOzBBx/UsWPH9Mknn6h3795KSkpSu3bttHTp0hu+1jCMIu/LM2LECK1atUoZGRlatWqVRowYUeTr4+PjNXjwYLm4/LY/JjIyUl999ZV++umnMm1TcbAHCABucSXZ68QeJ3Nyd3fXvffeq3vvvVdTp07VqFGjFBsbq3ah4bqcX6BjZ87b7F2SpDOnf1VmZqbcffz17ZEz8vrduuDgYAUFBSkyMlItWrRQ69atlZaWZvP6X3/9VR9//LEuX76sN99807o8Pz9f8fHxRZ5gbU/sAQIAADZatmyp3Nzc6455b/FCOTk56Z6wokPziBEjlJSUdM29P++9955uu+02ffPNN0pLS7M+Zs+eraVLlyo/P7/M23E97AECYGrsHbG/ijjPqSL+HK/WrFfdWdPvqaNLVXNkcblgl7nLS1ZWliIiIjRixAi1adNG1atX186dOzVr1iz179/fOi733DmdOnlCV65c1tH0Q/rs41Va/f47eur5aWrQqHGRc48ePVoRERGqUaNGkesXL16sgQMHqnXr1jbL69evr0mTJmn9+vXq29dxv3MEIAAATKpatWrq1KmTXnvtNf3000+6fPmy6tevr9GjR2vy5Mk6kPXblWH/mP2S/jH7JVVxdZVv7ToKvv0OLVrxb3Xs0u2ac7u4uMjX17fIdSkpKfrmm2/09ttvF1rn7e2tnj17avHixQQgAABuRp+MvUtS8a7IupE/noNjD25uboqLi1NcXNw1RlzU58nfFmuuwMBAGYZxzfVt27a1rr/R2HXr1hWrZllwDhAAADAdAhAAADAdAhAAADAdAhAAADAdToIGSonLp+2Pr4kAUF7YAwQAAEyHAAQAAEyHQ2AmYIbDCmbYRgCA/RCAKgDnjgAAULE4BAYAAEyHPUAArom9lUDZtPlnQ/vNVYwx3446VKI5hw8frmXLlkn67bu7brvtNkVEROiFF16Qu7u7JCmkvo91vHtVD9Xx81fbDp0UGf2oWrZpa12XlJSke+65RzVq1NDx48etr5ekr7/+Wh07dpSkIr8CIygoSAcPHtShQ4fk7+9fom0oLfYAAQBgYr1799bx48f1888/67XXXtNbb72l2NhYmzEvzF6gxJR9Wp2YrEl/f1V5ebka0i9Un364otB81atX18cff2yzbPHixWrQoEGR9bds2aLz589r4MCB1jBWHtgDBIdgzwEA3Bzc3Nyse13q16+v0NBQJSQk6JVXXrGOqe7lLd86fpKkevUbqEuPP2vKM48rbuoE9QjtLa8aNaxjo6KiFB8fr8jISEnS+fPntWLFCj311FOaOXNmofqLFy/Www8/rB49emjcuHGaOHGiA7f2fwhAwE2CK90AONqePXu0detWNWx440N3Q0Y9oU8/XKHkzZsUdv8D1uVDhw7Vq6++qvT0dDVo0EAfffSRAgMD1a5du0JznD17VqtWrdL27dsVFBSk7Oxsbd68Wd26dbPrdhWFQ2AAAJjY2rVrVa1aNbm7uys4OFgnT57U+PHjb/i6Rk2aSZKOHUm3WV6nTh316dNHS5culSTFx8drxIgRRc6xYsUKNWvWTK1atZKzs7MeeughLV68uGwbVEwEIAAATOyee+5RWlqatm/frqioK
<base64-encoded PNG data elided: transport bandwidth bar chart (SDMA/RDMA per rank id)>", -       "text/plain": [ -        "
" - ] - }, - "metadata": {}, - "output_type": "display_data" - } - ], - "source": [ - "# 设置展示图大小\n", - "fig, ax = plt.subplots(figsize=(10,8))\n", - "\n", - "x = np.arange(len(rank_ids)) # the label locations\n", - "\n", - "rects1 = ax.bar(x - width/2, sdma_bw, width, label='SDMA')\n", - "rects2 = ax.bar(x + width/2, rdma_bw, width, label='RDMA')\n", - "\n", - "# Add some text for labels, title and custom x-axis tick labels, etc.\n", - "ax.set_ylabel('Bandwidth(GB/s)')\n", - "ax.set_xlabel('Rank ID')\n", - "ax.set_title('Transport Bandwidth')\n", - "ax.set_xticks(x)\n", - "ax.set_xticklabels(rank_ids)\n", - "ax.legend()\n", - "print(words)" - ] - }, - { - "cell_type": "code", - "execution_count": 13, - "id": "77d6efa1-48e3-409f-82c4-3e2b3d868898", - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "RDMA bandwidth(GB/s): \n", - "The average is 0.041, while the maximum is 0.041GB/s and the minimum is 0.041GB/s. the difference is 0.0GB/s. \n", - "SDMA bandwidth(GB/s): \n", - "The average is 0.054, while the maximum is 0.056GB/s and the minimum is 0.052GB/s. the difference is 0.003GB/s. \n" - ] - } - ], - "source": [ - "print(dataset.get('bottleneck'))" - ] - }, - { - "cell_type": "markdown", - "id": "ce27a1d3-1354-45f7-88d8-dcb8e438b2b2", - "metadata": {}, - "source": [ - "## 3) 分布式卡上的kernel算子统计展示" - ] - }, - { - "cell_type": "code", - "execution_count": 11, - "id": "e05774e9-c47e-400f-8421-b4b71bcdcbc4", - "metadata": {}, - "outputs": [], - "source": [ - "dataset = interface.get_data('cluster', 'kernel')" - ] - }, - { - "cell_type": "code", - "execution_count": 12, - "id": "e95b6849-1738-4975-929f-734edff5d1c1", - "metadata": {}, - "outputs": [ - { - "data": { - "text/html": [ - "
rank idNameInput ShapesInput Data TypesOutput ShapesDuration(us)_meanDuration(us)_varDuration(us)_maxDuration(us)_minDuration(us)_countDuration(us)_sum
00Add\"1024,2,5120;1024,2,5120\"DT_BF16;DT_BF16\"1024,2,5120\"45.01205082.95274855.925535.310816720.1928
10Add\"2,8192,5120;2,8192,5120\"DT_BF16;DT_BF16\"2,8192,5120\"447.183700NaN447.1837447.18371447.1837
20Add\"8192,2,1920;1920\"DT_BF16;DT_BF16\"8192,2,1920\"54.3308501.34284655.245652.64634217.3234
30Add\"8192,2,2560;2560\"DT_BF16;DT_BF16\"8192,2,2560\"75.4853750.76131576.280274.24074301.9415
40Add\";\"FLOAT;FLOAT\"\"1.2008840.0172571.49960.95975060.0442
....................................
144115atomic_memset-1_67_1998432_1_0\"\"UNDEFINED\"\"3.160000NaN3.16003.160013.1600
144215trans_Cast_14\"1\"FLOAT\"1\"1.3900000.0230671.60001.260045.5600
144315trans_Cast_15\"\"INT32\"\"64.44500036.27610070.300059.20004257.7800
144415trans_Cast_4\"1\"FLOAT\"1\"1.5550000.0358571.94001.3200812.4400
144515trans_Cast_5\"\"INT32\"\"62.89500015.58420069.860056.76008503.1600
\n", - "

1446 rows × 11 columns

\n", - "
" - ], - "text/plain": [ - " rank id Name Input Shapes \\\n", - "0 0 Add \"1024,2,5120;1024,2,5120\" \n", - "1 0 Add \"2,8192,5120;2,8192,5120\" \n", - "2 0 Add \"8192,2,1920;1920\" \n", - "3 0 Add \"8192,2,2560;2560\" \n", - "4 0 Add \";\" \n", - "... ... ... ... \n", - "1441 15 atomic_memset-1_67_1998432_1_0 \"\" \n", - "1442 15 trans_Cast_14 \"1\" \n", - "1443 15 trans_Cast_15 \"\" \n", - "1444 15 trans_Cast_4 \"1\" \n", - "1445 15 trans_Cast_5 \"\" \n", - "\n", - " Input Data Types Output Shapes Duration(us)_mean Duration(us)_var \\\n", - "0 DT_BF16;DT_BF16 \"1024,2,5120\" 45.012050 82.952748 \n", - "1 DT_BF16;DT_BF16 \"2,8192,5120\" 447.183700 NaN \n", - "2 DT_BF16;DT_BF16 \"8192,2,1920\" 54.330850 1.342846 \n", - "3 DT_BF16;DT_BF16 \"8192,2,2560\" 75.485375 0.761315 \n", - "4 FLOAT;FLOAT \"\" 1.200884 0.017257 \n", - "... ... ... ... ... \n", - "1441 UNDEFINED \"\" 3.160000 NaN \n", - "1442 FLOAT \"1\" 1.390000 0.023067 \n", - "1443 INT32 \"\" 64.445000 36.276100 \n", - "1444 FLOAT \"1\" 1.555000 0.035857 \n", - "1445 INT32 \"\" 62.895000 15.584200 \n", - "\n", - " Duration(us)_max Duration(us)_min Duration(us)_count Duration(us)_sum \n", - "0 55.9255 35.3108 16 720.1928 \n", - "1 447.1837 447.1837 1 447.1837 \n", - "2 55.2456 52.6463 4 217.3234 \n", - "3 76.2802 74.2407 4 301.9415 \n", - "4 1.4996 0.9597 50 60.0442 \n", - "... ... ... ... ... \n", - "1441 3.1600 3.1600 1 3.1600 \n", - "1442 1.6000 1.2600 4 5.5600 \n", - "1443 70.3000 59.2000 4 257.7800 \n", - "1444 1.9400 1.3200 8 12.4400 \n", - "1445 69.8600 56.7600 8 503.1600 \n", - "\n", - "[1446 rows x 11 columns]" - ] - }, - "execution_count": 12, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "dataset" - ] - }, - { - "cell_type": "code", - "execution_count": 5, - "id": "27b75df4-792b-43dc-aa5c-d3c265642c1e", - "metadata": {}, - "outputs": [], - "source": [ - "# 保存到csv查看, 可修改保存路径\n", - "dataset.to_csv('cluster_kernel_details.csv', index=False, sep='\\t')" - ] - }, - { - "cell_type": "markdown", - "source": [ - "## 4) 展示集群流水并行图\n", - "使用说明: \n", - "1). 需要使用Ascend Torch Profiler采集数据,如果需要展示FP和BP需要将activities设置为采集CPU和NPU \n", - "2). rank_ids为要展示的rank id列表,必选参数, 可视化顺序与rank_ids的顺序一致 \n", - "3). worker_num为多进程数量,可选参数,请根据机器配置调整,默认值为机器可用核心数的一半 \n", - "4). 如果没有采集CPU数据,则展示Stage和Bubble的流水图 \n", - "5). 
生成的json文件可以在chrome trace中查看 \n", - "\n", - "示例图:\n", - "![pipeline_view](../../profiler/test/resource/pipeline_view.png)" - ], - "metadata": { - "collapsed": false - }, - "id": "ae45826394463cc4" - }, - { - "cell_type": "code", - "outputs": [], - "source": [ - "import json\n", - "\n", - "# rank_ids为要呈现的rank id列表,必选参数\n", - "# 可以使用列表推导式生成需要的rank_ids,最终展示顺序和rank_ids的顺序一致\n", - "# worker_num为多进程数量,可选参数,请根据机器配置调整,默认值为机器可用核心数的一半\n", - "dataset = interface.get_data(\"cluster\", \"pipeline\", rank_ids=[0, 1, 2, 3, 4, 5, 6, 7], worker_num=8)\n", - "\n", - "# 保存json数据,在chrome trace中查看\n", - "with open(\"./pipeline_view.json\", \"w\") as f:\n", - " json.dump(dataset.get(\"data\", []), f)" - ], - "metadata": { - "collapsed": false - }, - "id": "baf66781eccfbca1" - } - ], - "metadata": { - "kernelspec": { - "display_name": "Python 3 (ipykernel)", - "language": "python", - "name": "python3" - }, - "language_info": { - "codemirror_mode": { - "name": "ipython", - "version": 3 - }, - "file_extension": ".py", - "mimetype": "text/x-python", - "name": "python", - "nbconvert_exporter": "python", - "pygments_lexer": "ipython3", - "version": "3.10.7" - } - }, - "nbformat": 4, - "nbformat_minor": 5 -} diff --git a/profiler/advisor/common/__init__.py b/profiler/advisor/common/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/profiler/advisor/common/constant.py b/profiler/advisor/common/constant.py new file mode 100644 index 0000000000..9703e78c00 --- /dev/null +++ b/profiler/advisor/common/constant.py @@ -0,0 +1,106 @@ +# timeline +DEQUEUE = "Dequeue" +DEQUEUE_SEP = "@" +ATEN = "aten" +NPU = "npu" +ATEN_SEP = "::" +OPTIMIZER = "Optimizer" +OPTIMIZER_SEP = "#" +OPTIMIZER_STEP = "step" +ENQUEUE = "enqueue" +TORCH_TO_NPU = "torch_to_npu" +OP_COMPILE_NAME = "AscendCL@aclopCompileAndExecute" +OP_COMPILE_ID = "aclopCompileAndExecute" +MAX_OP_COMPILE_NUM = 20 +ACL_TO_NPU = "acl_to_npu" +TASK_TYPE = "Task Type" +CPU_OP = "cpu_op" +AI_CORE = "AI_CORE" +AI_CPU = "AI_CPU" +CALL_STACKS = "Call stack" +INPUT_DIMS = "Input Dims" +OP_SEP = "-" +MA_ADVISOR_MAX_PROCESSES = 16 +MA_ADVISOR_ANALYZE_PROCESSES = "MA_ADVISOR_ANALYZE_PROCESSES" +TIMELINE_OP_STACKS_DATASET = "timeline_op_stacks_dataset" +TIMELINE_BACKWARD_NO_STACK = "Backward broadcast, without call stacks in profiling." +TIMELINE_ACL_TO_NPU_NO_STACK = "Incoming flow is 'acl_to_npu', without call stacks in profiling." +TIMELINE_BACKWARD_NO_STACK_CODE = -1 +TIMELINE_ACL_TO_NPU_NO_STACK_CODE = -2 +TIMELINE_FUSION_OPS_NO_STACK_FLAG = "NO STACK" +NO_STACK_REASON_MAP = { + TIMELINE_BACKWARD_NO_STACK_CODE: "Backward broadcast, without call stacks in profiling.", + TIMELINE_ACL_TO_NPU_NO_STACK_CODE: "Incoming flow is 'acl_to_npu', without call stacks in profiling." 
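+    # The two negative codes used as keys here are sentinels for timeline events that
+    # carry no Python call stack; a hedged lookup sketch (illustrative only):
+    #   reason = NO_STACK_REASON_MAP.get(TIMELINE_BACKWARD_NO_STACK_CODE, "")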
+} +TIMELINE_API_DOC_URL = "https://3ms.huawei.com/hi/group/3942456/wiki_7680982.html" +AFFINITY_TRAINING_API = "Affinity training api" +TIMELINE_WITH_STACK_DOC_URL = "https://www.hiascend.com/document/detail/zh/canncommercial/" \ + "70RC1/modeldevpt/ptmigr/AImpug_0067.html" +PyTorch_AOE_OPERATOR_TUNE_URL = "https://www.hiascend.com/document/detail/zh/canncommercial/" \ + "70RC1/devtools/auxiliarydevtool/aoe_16_045.html" +MSLite_Infer_AOE_OPEATOR_TUNE_URL = "https://www.mindspore.cn/lite/docs/en/master/use/cloud_infer/converter_tool_ascend.html#aoe-auto-tuning" +ENABLE_COMPILED_TUNE_URL = "https://www.hiascend.com/document/detail/zh/canncommercial/" \ + "70RC1/modeldevpt/ptmigr/AImpug_0059.html" + +ASCEND_PROFILER_URL = "https://www.hiascend.com/document/detail/zh/canncommercial/70RC1/modeldevpt/ptmigr/AImpug_0067.html" +TIMELINE_EMPTY_STACKS_PROMPT = "These APIs have no code stack. If parameter 'with_stack=False' while profiling, " \ + "please refer to {timeline_profiling_doc_url} to set 'with_stack=True'. " \ + "Otherwise, ignore following affinity APIs due to backward broadcast lack of stack." + +CLUSTER_ANALYSIS = "Cluster analysis" +SLOW_RANK_TIME_RATIO_THRESHOLD = 0.05 + +# version_control +CANN_VERSION_C30 = '6.3.RC2' +CANN_VERSION_C13 = '7.0.RC1' +CANN_VERSION_C15 = '7.0.0' +CANN_VERSION_C17 = '8.0.0' +SUPPORTED_CANN_VERSION = [CANN_VERSION_C30, CANN_VERSION_C13, CANN_VERSION_C15, CANN_VERSION_C17] +DEFAULT_CANN_VERSION = CANN_VERSION_C15 +ASCEND_PYTORCH_PROFILER = "ascend_pytorch_proflier" +MSLITE = "mslite" +MSPROF = "msprof" +SUPPORTED_PROFILING_TYPE = [ASCEND_PYTORCH_PROFILER, MSLITE, MSPROF] +DEFAULT_PROFILING_TYPE = ASCEND_PYTORCH_PROFILER +TORCH_VERSION_1_11_0 = '1.11.0' +TORCH_VERSION_2_1_0 = '2.1.0' + +SUPPORTED_TORCH_VERSION = [TORCH_VERSION_1_11_0, TORCH_VERSION_2_1_0] +DEFAULT_TORCH_VERSION = TORCH_VERSION_2_1_0 + +TERMINAL_OUTPUT_HEADERS = ["No.", "Problem", "Description", "Suggestion"] +SKIP_ANALYZE_PROMPT = "Finish analysis, no optimization suggestions" +SKIP_QUERY_PROMPT = "Finish query operator stack, no operators" + +# operator output constant +OPERATOR_OUT_TOPK = 10 +OPERATOR_LIST_UNLIMIT = -1 + +DEFAULT_OPERATOR_TYPE = 'None_type' +DEFAULT_DURATION_ZERO = 0.0 + +ADVISOR_LOG_LEVEL = "ADVISOR_LOG_LEVEL" +DEFAULT_LOG_LEVEL = "INFO" +SUPPORTED_LOG_LEVEL = ["DEBUG", "INFO", "WARNING", "ERROR", "CRITICAL"] + +CLOUD_RULE_REGION_CN_NORTH_9 = "cn-north-9" +CLOUD_RULE_REGION_CN_NORTH_7 = "cn-north-7" +CLOUD_RULE_REGION_CN_SOUTHWEST_2 = "cn-southwest-2" +CLOUD_RULE_REGION_LIST = [CLOUD_RULE_REGION_CN_NORTH_7, CLOUD_RULE_REGION_CN_NORTH_9, CLOUD_RULE_REGION_CN_SOUTHWEST_2] +DEFAULT_CLOUD_RULE_REGION = CLOUD_RULE_REGION_CN_SOUTHWEST_2 + +AICPU_RULES_YAML_NAME = "aicpu_rules.yaml" +FUSSION_PASS_YAML_NAME = "op_fussion_pass.yaml" +TIMELINE_FUSION_OPS_YAML_NAME = "timeline_fusion_ops.yaml" +CLOUD_YAML_NAME_LIST = [AICPU_RULES_YAML_NAME, FUSSION_PASS_YAML_NAME, TIMELINE_FUSION_OPS_YAML_NAME] + +MAX_RETRIES = 3 +TIMEOUT = 3 + +ADVISOR_RULE_PATH = "ADVISOR_RULE_PATH" +CLOUD_RULE_PATH = "rules/cloud/" +DEFAULT_RULE_PATH = "./rules/" + +TIMELINE_FUSION_OPS_INVALID_UNIQUE_ID = -1 + +DEFAULT_TEMPLATE_HEADER = "Performance Optimization Suggestions" diff --git a/profiler/advisor/common/module_lib.py b/profiler/advisor/common/module_lib.py new file mode 100644 index 0000000000..697e37f736 --- /dev/null +++ b/profiler/advisor/common/module_lib.py @@ -0,0 +1,87 @@ +import logging + +from profiler.advisor.analyzer.scheduling.fusion_ops.fusion_ops_analyzer import TimelineFusionOpsAnalyzer 
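+# The registry classes below map an analysis dimension ("computing", "scheduling",
+# "communication", "overall", "dataloader") and an analyzer name to analyzer and
+# dataset classes. A hedged usage sketch (illustrative only, real call sites may differ):
+#   analyzer_cls = AnalysisScope.get_analyzer("scheduling", "timeline_fusion_ops")
+#   dataset_cls_list = AnalyzerToDataset.get_dataset("timeline_fusion_ops")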
+from profiler.advisor.analyzer.overall.overall_analyzer import OverallSummaryAnalyzer + +from profiler.advisor.dataset.timeline_event_dataset import TimelineEventDataset + +logger = logging.getLogger() + + +class AnalysisScope: + supported_dims = ["computing", "scheduling", "communication", "overall", "dataloader"] + + @staticmethod + def get_analyzer(dimension, analyzer_name, is_inference=False): + if is_inference: + return getattr(InferenceAnalysisScope, dimension)().get(analyzer_name) + return getattr(TrainAnalysisScope, dimension)().get(analyzer_name) + + @staticmethod + def analyzer_list(dim=None, is_inference=False): + analyzer_list = [] + dims = [dim] if dim else AnalysisScope.supported_dims + for dim in dims: + analyzer_list += list(getattr(InferenceAnalysisScope, dim)().keys()) if is_inference else list( + getattr(TrainAnalysisScope, dim)().keys()) + return analyzer_list + + +class TrainAnalysisScope(AnalysisScope): + + @staticmethod + def computing(): + return dict() + + @staticmethod + def scheduling(): + return dict( + timeline_fusion_ops=TimelineFusionOpsAnalyzer + ) + + @staticmethod + def communication(): + return dict() + + @staticmethod + def overall(): + return dict( + overall_summary=OverallSummaryAnalyzer + ) + + @staticmethod + def dataloader(): + return dict() + + +class InferenceAnalysisScope(AnalysisScope): + @staticmethod + def computing(): + return dict() + + @staticmethod + def scheduling(): + return dict() + + @staticmethod + def communication(): + return dict() + + @staticmethod + def overall(): + return dict() + + @staticmethod + def dataloader(): + return dict() + + +class AnalyzerToDataset: + analyzer_to_dataset = { + "overall_summary": [], + "timeline_fusion_ops": [TimelineEventDataset] + } + + @staticmethod + def get_dataset(analyzer_name): + return AnalyzerToDataset.analyzer_to_dataset.get(analyzer_name) diff --git a/profiler/advisor/common/timeline/__init__.py b/profiler/advisor/common/timeline/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/profiler/advisor/common/timeline/event.py b/profiler/advisor/common/timeline/event.py new file mode 100644 index 0000000000..8eebc550d0 --- /dev/null +++ b/profiler/advisor/common/timeline/event.py @@ -0,0 +1,23 @@ +class AdvisorDict(dict): + def __getstate__(self): + return self.__dict__ + + def __setstate__(self, d): + self.__dict__.update(d) + + def __getattr__(self, key: str): + if key not in self: + return {} + + value = self[key] + if isinstance(value, dict): + value = AdvisorDict(value) + return value + + +class TimelineEvent(AdvisorDict): + + def ts_include(self, event): + + return float(self.ts) <= float(event.ts) and float(self.ts) + float(self.dur) >= float(event.ts) + float( + event.dur) \ No newline at end of file diff --git a/profiler/advisor/common/timeline/fusion_ops_db.py b/profiler/advisor/common/timeline/fusion_ops_db.py new file mode 100644 index 0000000000..19a86437e0 --- /dev/null +++ b/profiler/advisor/common/timeline/fusion_ops_db.py @@ -0,0 +1,555 @@ +import copy +import logging +import os + +import yaml + +from profiler.advisor.common import constant as const +from profiler.advisor.utils.log import get_log_level + +logger = logging.getLogger() +logger.setLevel(get_log_level()) + + +class TimelineOpRuleHandler: + """基于线性规划思想保存OpRule,用于局部继承、全局继承等功能""" + + def __init__(self): + self._db_content = None + # 具体生成的timeline规则,key为unique_id + self._all_tmp_timeline_op_rule = {} + # 所有timeline规则的dict集合,key为unique_id + self._all_origin_timeline_op_rule_dict = {} + # 
已生成timeline规则的id数组 + self._exist_timeline_op_rule_unique_id_list = [] + + @staticmethod + def _get_local_inherit_id_list(op_rule: dict): + local_inherit_id_list = [] + for _, val in op_rule.items(): + if val.get("inherit_unique_id") is not None: + local_inherit_id_list.append(val.get("inherit_unique_id")) + return local_inherit_id_list + + @staticmethod + def _is_duplicated_element_in_lists(list_a, list_b): + """检查两个数组中是否存在重复的元素,若有任意元素重复,返回True""" + if not isinstance(list_a, list): + list_a = [list_a] + if not isinstance(list_b, list): + list_b = [list_b] + for element in list_a: + if element in list_b: + return True + return False + + def set_db_content(self, db_content): + # 过滤非 dict 格式, 或 dict 中没有定义 unique_id 的数据, 并保存到 _all_origin_timeline_op_rule_dict 中 + self._db_content = copy.deepcopy(db_content) + for rule_dic in self._db_content: + if not isinstance(rule_dic, dict) or rule_dic.get("unique_id") is None: + continue + self._all_origin_timeline_op_rule_dict[rule_dic.get("unique_id")] = rule_dic + if self._all_origin_timeline_op_rule_dict: + self.generate_all_timeline_op_rule() + + def generate_basic_timeline_op_rules(self): + """用于实现获取无全局继承规则, 无全局继承的规则认为是基础版本规则, 默认不会存在局部继承""" + for _, rule_dic in self._all_origin_timeline_op_rule_dict.items(): + if rule_dic.get("inherit_unique_id") is None: + self.add_basic_timeline_op_rule(rule_dic) + + def add_basic_timeline_op_rule(self, rule_dic): + # 若基础规则中存在局部继承的规则,则跳过 + local_inherit_id_list = self._get_local_inherit_id_list(rule_dic.get("operator_rules")) + if local_inherit_id_list: + return + + temp_rule = OpRule() + temp_rule.merge(rule_dic.get("operator_rules")) + + unique_id = rule_dic.get("unique_id") + logger.debug("The rule of version %s is basic rule.", unique_id) + self.add_new_timeline_op_rule(unique_id, temp_rule.tmp_rule) + + def add_empty_timeline_op_rule(self, unique_id): + if self._all_origin_timeline_op_rule_dict.get(unique_id) is None: + self._all_origin_timeline_op_rule_dict[unique_id] = {} + tmp_rule = {} + logger.debug("The rule of version %s is empty.", unique_id) + self.add_new_timeline_op_rule(unique_id, tmp_rule) + + def add_new_timeline_op_rule(self, unique_id, tmp_rule): + if unique_id not in self._exist_timeline_op_rule_unique_id_list: + self._exist_timeline_op_rule_unique_id_list.append(unique_id) + self._all_tmp_timeline_op_rule[unique_id] = tmp_rule + logger.debug("The rule of version %s is successfully generated.", unique_id) + + def generate_specified_list_timeline_op_rule(self, specified_unique_id_list, kid_id_list=None): + for specified_unique_id in specified_unique_id_list: + if specified_unique_id in self._exist_timeline_op_rule_unique_id_list: + self.generate_specified_timeline_op_rule(specified_unique_id, kid_id_list) + + def generate_specified_timeline_op_rule(self, specified_unique_id, kid_id_list=None): + """用于实现生成特定版本规则 + + 若不存在相应specified_unique_id的规则、或是已生成、循环继承等情况,将该规则置空并返回 + 规则库文件结构设置为多叉树, 结构决定了不断向下搜索最终应该是从基础版本开始继承, 递归生成, + 直到specified_unique_id规则依赖继承的规则库全部生成完毕, 再生成该指定规则库, 将specified_unique_id的规则库归档 + + 参数: + specified_unique_id: 指定版本规则id + kid_id_list: 子规则id数组, 用于防止循环继承, 如间接继承自身或直接继承自身等情况 + 返回: + None + """ + if kid_id_list is None: + kid_id_list = [] + + # 若该unique_id规则在timeline_fusion_ops.yaml中没有相应的规则, 生成该id规则,置为空 + if self._all_origin_timeline_op_rule_dict.get(specified_unique_id) is None: + logger.warning("The specified version %s does not exist in the rule library. " + "Ensure that the corresponding rule is configured in the YAML file. 
" + "The version %s is left blank.", + specified_unique_id, + specified_unique_id) + self.add_empty_timeline_op_rule(specified_unique_id) + return + + # 若该unique_id规则已经生成,则无需再次生成 + if specified_unique_id in self._exist_timeline_op_rule_unique_id_list: + logger.warning("The rule has been generated and does not need to be generated again. " + "Check whether unique id %s in the YAML file is duplicate.", + specified_unique_id) + return + + # 若kid_id_list不为空,且间接继承自身,则尝试生成空规则用于继承 + if kid_id_list and self._is_duplicated_element_in_lists(specified_unique_id, kid_id_list): + logger.warning("It cannot be inherited indirectly. Ensure that the corresponding rules are correctly " + "configured in the YAML file and leave Version %s blank.", + specified_unique_id) + self.add_empty_timeline_op_rule(specified_unique_id) + return + + rule_dic = self._all_origin_timeline_op_rule_dict.get(specified_unique_id) + if rule_dic is not None: + kid_id_list.append(specified_unique_id) + + global_inherit_id = rule_dic.get("inherit_unique_id") + if global_inherit_id and global_inherit_id not in self._exist_timeline_op_rule_unique_id_list: + logger.debug("The rule of version %s global inherit the rule of version %s", + specified_unique_id, global_inherit_id) + self.generate_specified_timeline_op_rule(global_inherit_id, kid_id_list) + + # 若局部继承的规则未生成, 生成该规则 + local_inherit_id_list = self._get_local_inherit_id_list(rule_dic.get("operator_rules")) + if local_inherit_id_list: + logger.debug("The rule of version %s local inherit the rule of version %s", + specified_unique_id, local_inherit_id_list) + self.generate_specified_list_timeline_op_rule(specified_unique_id_list=local_inherit_id_list, + kid_id_list=kid_id_list) + logger.debug("Start to generate rule of version %s", specified_unique_id) + # 实现全局继承与局部继承 + temp_rule = OpRule(timeline_op_rule_handler=self, + rule=self._all_tmp_timeline_op_rule.get(global_inherit_id)) + temp_rule.merge(rule_dic.get("operator_rules")) + # 将生成的规则归档保存 + self.add_new_timeline_op_rule(specified_unique_id, temp_rule.tmp_rule) + return + logger.error("Failed to generate the rule whose unique_id is %s. Ensure that the rule is configured in " + "the YAML file and the version %s is empty.", specified_unique_id, specified_unique_id) + self.add_empty_timeline_op_rule(specified_unique_id) + + def generate_all_timeline_op_rule(self): + """用于实现获取所有版本规则 + + 查找db_content中的规则库, 规则库文件结构设置为多叉树, 优先生成无继承的基础规则版本 + 循环并生成其他版本, 文件结构决定了不断向下搜索最终应该是从基础版本开始继承, 递归生成,直到全部规则库生成后退出函数 + + 参数: + None + 返回: + None + """ + self.generate_basic_timeline_op_rules() + _unique_id_list = copy.deepcopy(list(self._all_origin_timeline_op_rule_dict.keys())) + for unique_id in _unique_id_list: + if unique_id in self._exist_timeline_op_rule_unique_id_list: + continue + self.generate_specified_timeline_op_rule(unique_id) + + def get_tmp_timeline_op_rule_with_unique_id(self, unique_id): + if unique_id not in self._exist_timeline_op_rule_unique_id_list: + logger.error("The specified unique_id does not exist in the rule library. Ensure that the " + "corresponding rule is configured in the YAML file and the version %s is empty." + "If the value of unique_id is a negative number, the version may not be supported.", + unique_id) + self.add_empty_timeline_op_rule(unique_id) + if unique_id < 0: + logger.error("Advise to use a positive integer as the unique id of rules. " + "Negative numbers: %s are not recommended to use as unique id. 
" + "If specified invalid unique id: %s is used, an empty rule is returned by default.", + unique_id, const.TIMELINE_FUSION_OPS_INVALID_UNIQUE_ID) + return self._all_tmp_timeline_op_rule.get(unique_id) + + +class OpRule: + + def __init__(self, rule=None, timeline_op_rule_handler=None): + if rule is None: + self._tmp_rule = {} + else: + self._tmp_rule = copy.deepcopy(rule) + if timeline_op_rule_handler is None: + self.timeline_op_rule_handler = {} + else: + self.timeline_op_rule_handler = copy.deepcopy(timeline_op_rule_handler) + self._rule = {} + + @property + def tmp_rule(self): + return self._tmp_rule + + @staticmethod + def _format_rule(rule): + """格式化规则函数, 将额外规则格式化为{key,数组list}形式, 使得yaml文件中operator_rules若写成key:str形式也能正常读取""" + format_rule = {} + for key, val in rule.items(): + if not isinstance(val, list): + val = [val] + format_rule[key] = val + return format_rule + + def merge(self, extra_rule): + """合并函数, 将已有规则库与额外规则合并, 若无继承则已有规则库应为空""" + for key, val in extra_rule.items(): + for func, op_rules in val.items(): + try: + getattr(self, f"{func}")(key, op_rules) + except AttributeError: + logger.error("Undefined field and function name. Ensure that %s is correct in the rule " + "library.", func) + + def get_final_rules(self): + """获取最终的规则库""" + self._restore_rule() + return self._rule + + def add(self, key, add_rules: dict): + """新增函数, 新增已有规则库不存在的额外规则""" + if add_rules is None: + return + if self._tmp_rule.get(key) is None: + self._tmp_rule[key] = {} + format_add_rule = self._format_rule(add_rules) + for add_key, add_val in format_add_rule.items(): + logger.debug("add: %s: %s", add_key, add_val) + if add_key not in self._tmp_rule: + self._tmp_rule[key][add_key] = add_val + else: + logger.warning("This key has been written to the rule, " + "%s: %s should be written in the overwrite section", add_key, add_val) + self._tmp_rule[key][add_key].update(add_val) + + def overwrite(self, key, overwrite_rules: dict): + """重写函数, 重写已有规则库中已经存在的规则""" + if overwrite_rules is None: + return + if self._tmp_rule.get(key) is None: + self._tmp_rule[key] = {} + format_overwrite_rules = self._format_rule(overwrite_rules) + for overwrite_key, overwrite_val in format_overwrite_rules.items(): + logger.debug("overwrite: %s: %s", overwrite_key, overwrite_val) + if overwrite_key not in self._tmp_rule: + logger.warning("This key is not written to the rule. " + "%s: %s should be written in the add section", overwrite_key, overwrite_val) + self._tmp_rule[key][overwrite_key] = overwrite_val + else: + self._tmp_rule[key][overwrite_key].update(overwrite_val) + + def exclude(self, key, exclude_rules: list): + """除外函数, 将已有规则库已有的规则除外删除""" + if exclude_rules is None: + return + for exclude_key in exclude_rules: + logger.debug("exclude: %s", exclude_key) + if isinstance(exclude_key, str): + if exclude_key not in self._tmp_rule[key]: + logger.warning("This key is not written to the rule. " + "do not need to exclude: %s.", exclude_key) + continue + self._tmp_rule[key].pop(exclude_key) + else: + logger.warning("Error type rule in exclude: %s", exclude_key) + + def inherit_unique_id(self, key, inherit_unique_id): + """局部继承函数, 将规则库中指定unique_id版本覆盖指定位置""" + result_rule = self.timeline_op_rule_handler.get_tmp_timeline_op_rule_with_unique_id(inherit_unique_id) + if result_rule is not None and result_rule.get(key) is not None: + self._tmp_rule[key] = copy.deepcopy(result_rule.get(key)) + return + logger.error("Rule library version %s does not exist. 
", inherit_unique_id) + + def _restore_rule(self): + for key, op_api_map in self._tmp_rule.items(): + self._rule[key] = [{op_combined: api} for op_combined, api in op_api_map.items()] + + +def get_file_path_by_walk(root, filename): + file_path = "" + for root, _, files in os.walk(root, topdown=True): + for name in files: + if name == filename: + file_path = os.path.join(root, name) + return file_path + return file_path + + +def get_timeline_fusion_ops_yaml_path(): + # 环境变量 ADVISOR_RULE_PATH 不为空且该路径存在, os.walk遍历其下文件, 若存在相应的规则文件则返回路径 + advisor_rule_path = os.getenv(const.ADVISOR_RULE_PATH) + if advisor_rule_path and os.path.exists(advisor_rule_path): + specified_file_path = get_file_path_by_walk(advisor_rule_path, const.TIMELINE_FUSION_OPS_YAML_NAME) + if len(specified_file_path.strip()) and os.path.exists(specified_file_path): + logger.debug("Successfully find The %s file which is specified by the environment variable: %s.", + specified_file_path, const.ADVISOR_RULE_PATH) + return specified_file_path + logger.warning("The %s does not exist in path: %s. Try to use cloud or default local YAML file.", + const.TIMELINE_FUSION_OPS_YAML_NAME, os.path.normpath(advisor_rule_path)) + # 检查云文件默认保存路径文件夹下是否存在相应文件, 默认路径 ~/rules/cloud/ + cloud_file_path = os.path.join(os.path.expanduser("~"), const.CLOUD_RULE_PATH, const.TIMELINE_FUSION_OPS_YAML_NAME) + if os.path.exists(cloud_file_path): + logger.debug("Successfully find The cloud %s file in %s.", const.TIMELINE_FUSION_OPS_YAML_NAME, + cloud_file_path) + return cloud_file_path + # 检查本地默认文件 + local_file_path = os.path.join(os.path.dirname(os.path.dirname(os.path.dirname(os.path.realpath(__file__)))), + const.DEFAULT_RULE_PATH, const.TIMELINE_FUSION_OPS_YAML_NAME) + if not os.path.exists(local_file_path): + # 若本地默认文件不存在, 则log异常信息并 + logger.error("The default local YAML file does not exist. 
Please check the YAML file in the default path %s.", + local_file_path) + return local_file_path + + +class FusionOperatorDB: + + def __init__(self, file_path=None, cann_version=None, torch_version=None): + self.timeline_fusion_ops_yaml_path = os.path.normpath(get_timeline_fusion_ops_yaml_path()) + + self.cann_version = cann_version or const.DEFAULT_CANN_VERSION + self.torch_version = torch_version or const.DEFAULT_TORCH_VERSION + + self._supported_version_dict = {} + + self.is_empty = False + self.timeline_op_rule_handler = TimelineOpRuleHandler() + self.fusion_operator = self._load_yaml(self.timeline_fusion_ops_yaml_path) + + self._dequeue_op_names = [] + self._aten_op_names = [] + self._optimizer_op_names = [] + self._dequeue_op_api_map = {} + self._aten_op_api_map = {} + self._optimizer_op_api_map = {} + self._parse_db() + + @property + def dequeue_op_names(self): + return self._dequeue_op_names + + @property + def aten_op_names(self): + return self._aten_op_names + + @property + def optimizer_op_names(self): + return self._optimizer_op_names + + @property + def dequeue_op_api_map(self): + return self._dequeue_op_api_map + + @property + def aten_op_api_map(self): + return self._aten_op_api_map + + @property + def optimizer_op_api_map(self): + return self._optimizer_op_api_map + + def get_fusion_operator_with_unique_id(self, unique_id): + if unique_id == const.TIMELINE_FUSION_OPS_INVALID_UNIQUE_ID: + logger.warning("The specified unique id: %s is invalid.Please check whether the rule of the unique id " + "exists and modify the rule.", const.TIMELINE_FUSION_OPS_INVALID_UNIQUE_ID) + return {} + result_tmp_rule = self.timeline_op_rule_handler.get_tmp_timeline_op_rule_with_unique_id(unique_id) + result_op_rule = OpRule(result_tmp_rule) + return result_op_rule.get_final_rules() + + def regenerate_timeline_op_rule_with_unique_id(self, unique_id): + self.fusion_operator.clear() + logger.debug("Program try to regenerate the rule to version %s.", unique_id) + self.fusion_operator = self.get_fusion_operator_with_unique_id(unique_id) + self.regenerate_op_api_map_and_op_names() + + def regenerate_timeline_op_rule_with_version(self, cann_version=None, torch_version=None): + cann_version = cann_version or self.cann_version + torch_version = torch_version or self.torch_version + unique_id = self._get_unique_id_in_supported_version_dict(cann_version=cann_version, + torch_version=torch_version) + self.regenerate_timeline_op_rule_with_unique_id(unique_id) + + def regenerate_op_api_map_and_op_names(self): + self._dequeue_op_names.clear() + self._aten_op_names.clear() + self._optimizer_op_names.clear() + self._dequeue_op_api_map.clear() + self._aten_op_api_map.clear() + self._optimizer_op_api_map.clear() + self._parse_db() + + def _is_version_supported(self, db_content): + """校验当前版本是否被规则库中的版本支持, 保存版本支持信息数组, 按数组或字符串的可变方式保存""" + if db_content is None : + logger.warning( + "The rule library is empty. Check the rule library file: %s", + self.timeline_fusion_ops_yaml_path + ) + return False + for rule_dic in db_content: + if not isinstance(rule_dic, dict) or rule_dic.get("unique_id") is None: + continue + cann_version_list = rule_dic.get("cann_version") + torch_version_list = rule_dic.get("torch_version") + if not cann_version_list or not torch_version_list: + continue + supported_version = [cann_version_list, torch_version_list] + + unique_id = rule_dic.get("unique_id") + if unique_id < 0: + logger.warning( + "The unique id: %s of the rule should be a positive integer. 
" + "Please check and modify the rule configuration in the YAML file: %s.", + unique_id, os.path.normpath(self.timeline_fusion_ops_yaml_path) + ) + self._supported_version_dict[unique_id] = supported_version + + # 若解析timeline规则库的版本支持数组为空, 则存在问题 + if not self._supported_version_dict: + logger.warning( + "The rule library does not contain rules that support the current version. " + "Check the rule library file: %s", + self.timeline_fusion_ops_yaml_path + ) + return False + + # 检验当前版本是否被规则库支持 + is_version_supported = self._is_version_supported_in_supported_version_dict() + if not is_version_supported: + # 若规则库不支持当前版本, 则log警告信息 + logger.warning("Unsupported versions: cann-%s and torch-%s, supported version list of ['cann', 'torch'] " + "is %s", self.cann_version, self.torch_version, self._supported_version_dict.values()) + return is_version_supported + + def _is_version_supported_in_supported_version_dict(self, cann_version=None, torch_version=None): + """校验当前版本是否存在在规则库中的版本支持字典中""" + for _, supported_version in self._supported_version_dict.items(): + if self._is_version_supported_in_version(supported_version, cann_version, torch_version): + return True + return False + + def _get_unique_id_in_supported_version_dict(self, cann_version=None, torch_version=None) -> int: + """校验当前版本是否存在在规则库中的版本支持字典中, 在使用前请检查是否支持该版本""" + for key_unique_id, supported_version in self._supported_version_dict.items(): + if self._is_version_supported_in_version(supported_version, cann_version, torch_version): + return key_unique_id + return const.TIMELINE_FUSION_OPS_INVALID_UNIQUE_ID + + def _is_version_supported_in_version(self, supported_version, cann_version=None, torch_version=None): + """校验当前cann版本和torch版本是否存在在规则库中的版本支持数组的元素中""" + cann_version_list = supported_version[0] + if not isinstance(cann_version_list, list): + cann_version_list = [cann_version_list] + + torch_version_list = supported_version[1] + if not isinstance(torch_version_list, list): + torch_version_list = [torch_version_list] + + cann_version = cann_version or self.cann_version + torch_version = torch_version or self.torch_version + + if (cann_version in cann_version_list) and (torch_version in torch_version_list): + return True + return False + + def _parse_db(self): + """生成输出的规则库""" + self._parse(const.ATEN) + self._parse(const.DEQUEUE) + self._parse(const.OPTIMIZER) + + def _parse(self, mode): + """生成输出的规则库中指定部分, 如aten, Optimizer等""" + op_info = self.fusion_operator.get(mode, []) or [] + for ops in op_info: + for npu_api, op_combined in ops.items(): + if not isinstance(op_combined, list): + self._parse_in_list(mode, op_combined, npu_api) + for _op_combined in op_combined: + self._parse_in_list(mode, _op_combined, npu_api) + + def _parse_in_list(self, mode, op_combined, npu_api): + """生成输出的规则库中具体部分, 如{silu: torch_npu.npu_silu/torch_npu.contrib.module.SiLU}等""" + if not isinstance(op_combined, str): + logger.warning("Error type in yaml: %s", op_combined) + return + mode_str = mode.lower() + getattr(self, f"{mode_str}_op_names", []).extend(op_combined.split("-")) + + new_npu_api = npu_api + pre_npu_api = getattr(self, f"{mode_str}_op_api_map", {}).get(op_combined) + if pre_npu_api: + new_npu_api = f"{pre_npu_api}/{npu_api}" + getattr(self, f"{mode_str}_op_api_map", {})[op_combined] = new_npu_api + logger.debug("Output rule: %s: %s: %s: %s ", mode, op_combined, new_npu_api, op_combined.split("-")) + + def _load_yaml(self, file_path): + """生成timeline规则库""" + logger.debug("Try to use the following yaml file as timeline ops rule: %s.", 
os.path.abspath(file_path)) + # 若文件不存在,则报错, 并返回空字典 + if not os.path.exists(file_path): + logger.warning("Path: '%s' does not exist, please specific existed path of " + "fusion operators yaml file by setting env '%s'", + os.path.abspath(file_path), const.ADVISOR_RULE_PATH) + self.is_empty = True + return {} + + logger.debug("The rule yaml file is successfully found in path: %s", os.path.abspath(file_path)) + + with open(file_path, "rb") as file: + db_content = yaml.safe_load(file) + + if not self._is_version_supported(db_content): + self.is_empty = True + return {} + + logger.debug("The rule library supports the current environment version.") + + # 获取所有版本timeline规则库 + self.timeline_op_rule_handler.set_db_content(db_content) + + # 获取所需版本规则 + unique_id = self._get_unique_id_in_supported_version_dict() + logger.debug("Program is using version %s of the rule.", unique_id) + result_op_rule = self.get_fusion_operator_with_unique_id(unique_id) + if result_op_rule and len(result_op_rule) > 0: + return result_op_rule + + logger.warning( + "Failed to load fusion operators database, skip analyze timeline for affinity api," + " please refer to database yaml %s to customize your yaml.", + self.timeline_fusion_ops_yaml_path + ) + self.is_empty = True + return {} diff --git a/profiler/advisor/common/version_control.py b/profiler/advisor/common/version_control.py new file mode 100644 index 0000000000..e3b3006a80 --- /dev/null +++ b/profiler/advisor/common/version_control.py @@ -0,0 +1,26 @@ +import logging +from typing import List + +logger = logging.getLogger() + + +class VersionControl: + _SUPPORT_VERSIONS = [] + + @classmethod + def is_supported(cls, cann_version: str) -> bool: + """ + Check whether the CANN software version is supported, which can be viewed by executing the following command: + 'cat /usr/local/Ascend/ascend-toolkit/latest/aarch64-linux/ascend_toolkit_install.info' + """ + flag = (cls._SUPPORT_VERSIONS.__contains__(cann_version)) + if not flag: + logger.debug("class type is %s, which is not support current CANN version %s", cls.__name__, cann_version) + return flag + + def get_support_version(self) -> List[str]: + """ + Acquire the CANN software version + :return: supported CANN software version + """ + return self._SUPPORT_VERSIONS diff --git a/profiler/advisor/compute_perf_analysis.ipynb b/profiler/advisor/compute_perf_analysis.ipynb deleted file mode 100644 index e7a663130c..0000000000 --- a/profiler/advisor/compute_perf_analysis.ipynb +++ /dev/null @@ -1,366 +0,0 @@ -{ - "cells": [ - { - "cell_type": "code", - "execution_count": 3, - "metadata": { - "ExecuteTime": { - "end_time": "2024-02-21T09:19:13.937531900Z", - "start_time": "2024-02-21T09:19:13.267899500Z" - } - }, - "outputs": [], - "source": [ - "import os\n", - "import pandas as pd\n", - "\n", - "from advisor_backend.interface import Interface\n", - "import numpy as np" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "# 算子调优分析\n", - "## 1. 算子分析的数据准备\n", - "当前算子分析工具支持分析Ascend Pyorch Profiler方式生成的ascend_pt目录\n", - "## 2. 融合算子分析\n", - "当前支持分析模型中存在可融合的小算子,并给出优化建议。\n", - "\n", - "\"更多融合算子信息,请查阅 https://www.hiascend.com/document/detail/zh/CANNCommunityEdition/700alpha003/processormodel/hardwaredesc_0001.html\n", - "\n", - "## 3. 
异常性能算子分析\n", - "支持分析模型中性能异常的计算算子" - ] - }, - { - "cell_type": "code", - "execution_count": 4, - "metadata": { - "ExecuteTime": { - "end_time": "2024-02-22T08:41:17.455567500Z", - "start_time": "2024-02-22T08:41:16.716884800Z" - } - }, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "[INFO] Start to analyse the target file: D:\\work\\ascend_pt\\ASCEND_PROFILER_OUTPUT\\kernel_details.csv\n" - ] - }, - { - "data": { - "text/html": [ - "
pattern_namepatternlencountduration sum(us)op durations(us)index
18torch_npu.npu_swiglu(Slice, Slice, Swish, Mul)4127.53[21.2, 0.05, 3.14, 3.14][0]
\n", - "
" - ], - "text/plain": [ - " pattern_name pattern len count duration sum(us) op durations(us) index\n", - "18 torch_npu.npu_swiglu (Slice, Slice, Swish, Mul) 4 1 27.53 [21.2, 0.05, 3.14, 3.14] [0]" - ] - }, - "metadata": {}, - "output_type": "display_data" - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "\n", - "\n", - "The computing time of fusable op is 27.53 ms.\n", - "\n", - "\n", - "Advice 0:\n", - "Replace [Slice, Slice, Swish, Mul] with torch_npu.npu_swiglu. This pattern first happened in: \n", - "/root/torch/module.py\n", - "/root/test/slice.py(116)\n" - ] - } - ], - "source": [ - "# EDIT THE PROFILING DATA PATH\n", - "compute_path = \"[YOUR PATH]\"\n", - "interface = Interface(compute_path)\n", - "data = interface.get_data('compute', 'npu_fused')\n", - "pd.set_option('display.max_columns', None)\n", - "pd.set_option('display.width', 900)\n", - "display(data['data'].iloc[:, :-2])\n", - "print('\\n')\n", - "print(data['bottleneck'])\n", - "print('\\n')\n", - "print(data['advice'])" - ] - }, - { - "cell_type": "code", - "execution_count": 5, - "metadata": { - "collapsed": false - }, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "[INFO] Start to analyse the target file: D:\\work\\ascend_pt\\ASCEND_PROFILER_OUTPUT\\kernel_details.csv\n" - ] - }, - { - "data": { - "text/html": [ - "
\n", - "\n", - "\n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - "
Step IdModel IDTask IDStream IDNameTypeAccelerator CoreStart Time(us)Duration(us)Wait Time(us)Block DimMix Block DimInput ShapesInput Data TypesInput FormatsOutput ShapesOutput Data TypesOutput FormatsContext IDaicore_time(us)aic_total_cyclesaic_mac_ratioaic_mac_int8_ratioaic_cube_fopsaic_vector_fopsaiv_time(us)aiv_total_cyclesaiv_vec_fp32_ratioaiv_vec_fp16_ratioaiv_vec_int32_ratioaiv_vec_misc_ratioaiv_cube_fopsaiv_vector_fopssize(MB)throughput(GB/s)color
014294967295126516Slice1SliceAI_VECTOR_CORE169952962310675021.20261.56904,1025INT64FORMAT_ND4,1025INT32FORMAT_NDNaN0.00.00.00.00.00.01.7729508.00.00.00.00620.00.05856.00.0469212.161371RED
414294967295126516Add1AddAI_CORE16995296231067543.14261.56904,1025INT64FORMAT_ND4,1025INT32FORMAT_NDNaN2.328888.00.20.10.10.70.000.00.00.00.00000.00.00.00.04692114.592698RED
\n", - "
" - ], - "text/plain": [ - " Step Id Model ID Task ID Stream ID Name Type Accelerator Core Start Time(us) Duration(us) Wait Time(us) Block Dim Mix Block Dim Input Shapes Input Data Types Input Formats Output Shapes Output Data Types Output Formats Context ID aicore_time(us) aic_total_cycles aic_mac_ratio aic_mac_int8_ratio aic_cube_fops aic_vector_fops aiv_time(us) aiv_total_cycles aiv_vec_fp32_ratio aiv_vec_fp16_ratio aiv_vec_int32_ratio aiv_vec_misc_ratio aiv_cube_fops aiv_vector_fops size(MB) throughput(GB/s) color\n", - "0 1 4294967295 1265 16 Slice1 Slice AI_VECTOR_CORE 1699529623106750 21.20 261.56 9 0 4,1025 INT64 FORMAT_ND 4,1025 INT32 FORMAT_ND NaN 0.0 0.0 0.0 0.0 0.0 0.0 1.77 29508.0 0.0 0.0 0.0062 0.0 0.0 5856.0 0.046921 2.161371 RED\n", - "4 1 4294967295 1265 16 Add1 Add AI_CORE 1699529623106754 3.14 261.56 9 0 4,1025 INT64 FORMAT_ND 4,1025 INT32 FORMAT_ND NaN 2.3 28888.0 0.2 0.1 0.1 0.7 0.00 0.0 0.0 0.0 0.0000 0.0 0.0 0.0 0.046921 14.592698 RED" - ] - }, - "metadata": {}, - "output_type": "display_data" - } - ], - "source": [ - "# 异常性能算子识别\n", - "from advisor_backend.compute_advice.npu_slow_advice import NpuSlowAdvice\n", - "\n", - "npu_slow_advice = NpuSlowAdvice(compute_path)\n", - "data = interface.get_data('compute', 'npu_slow')\n", - "slow_op_data = data[data[\"color\"] == \"RED\"]\n", - "display(slow_op_data)" - ] - }, - { - "cell_type": "code", - "execution_count": 6, - "metadata": {}, - "outputs": [], - "source": [ - "NpuSlowAdvice.save_to_excel(data, file_path=os.path.join(compute_path, \"slow_op.xlsx\"))" - ] - }, - { - "cell_type": "code", - "execution_count": 7, - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "call stack: \n", - "/root/torch/module.py\n", - "/root/test/slice.py(116)\n" - ] - } - ], - "source": [ - "# 异常性能算子call stack\n", - "call_stack = npu_slow_advice.get_call_stack(data, index_id=0, ts_col=\"Start Time(us)\")\n", - "print(\"call stack: \")\n", - "print(call_stack)" - ] - } - ], - "metadata": { - "kernelspec": { - "display_name": "Python 3 (ipykernel)", - "language": "python", - "name": "python3" - }, - "language_info": { - "codemirror_mode": { - "name": "ipython", - "version": 3 - }, - "file_extension": ".py", - "mimetype": "text/x-python", - "name": "python", - "nbconvert_exporter": "python", - "pygments_lexer": "ipython3", - "version": "3.11.5" - } - }, - "nbformat": 4, - "nbformat_minor": 4 -} diff --git a/profiler/advisor/config/__init__.py b/profiler/advisor/config/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/profiler/advisor/config/config.ini b/profiler/advisor/config/config.ini new file mode 100644 index 0000000000..b8f6703685 --- /dev/null +++ b/profiler/advisor/config/config.ini @@ -0,0 +1,16 @@ +[LOG] +# console_logging_level : DEBUG/INFO/WARNING/ERROR +console_logging_level = INFO +[ANALYSE] +# analysis_result_file : filename of analysis result +analysis_result_file = analysis_result_file.xlsx +# tune_ops_file: filename of tune op name list +tune_ops_file = operator_tuning_file.cfg +[THRESHOLD] +# operator_bound_ratio: (mte, cube, vector, scalar) ratio greater than this value will be checked in operator_bound_checker +operator_bound_ratio = 0.8 +[RULE] +# region : URL of different regions where can download rule yaml file +cn-north-9 = https://cnnorth9-modelarts-sdk.obs.cn-north-9.myhuaweicloud.com/modelarts/solution/ma_advisor_rules/ +cn-southwest-2 = 
https://cnsouthwest2-modelarts-sdk.obs.cn-southwest-2.myhuaweicloud.com/modelarts/solution/ma_advisor_rules/ +cn-north-7 = https://cnnorth7-modelarts-sdk.obs.cn-north-7.ulanqab.huawei.com/modelarts/solution/ma_advisor_rules/ \ No newline at end of file diff --git a/profiler/advisor/config/config.py b/profiler/advisor/config/config.py new file mode 100644 index 0000000000..183c2ed5a2 --- /dev/null +++ b/profiler/advisor/config/config.py @@ -0,0 +1,103 @@ +""" +advisor config +""" +from profiler.advisor.utils.utils import Timer + +import logging +import os +from configparser import ConfigParser + +from profiler.advisor.utils.utils import singleton + +logger = logging.getLogger() + + +@singleton +class Config: + """ + config + """ + # pylint: disable=too-many-instance-attributes + + _CONFIG_DIR_NAME = "config" + _CONFIG_FILE_NAME = "config.ini" + + def __init__(self) -> None: + config = ConfigParser(allow_no_value=True) + self._work_path = os.getcwd() # pwd + self._root_path = os.path.abspath(os.path.join(__file__, "../../")) + config.read(os.path.join(self._root_path, self._CONFIG_DIR_NAME, self._CONFIG_FILE_NAME)) + self.config = config + # ANALYSE + self._analysis_result_file = self._normalize_path(config.get("ANALYSE", "analysis_result_file")) + self._tune_ops_file = os.path.abspath( + os.path.join(self._work_path, f"operator_tuning_file_{Timer().strftime}.cfg")) + + def _normalize_path(self, file) -> str: + if not file.startswith("/"): + file = os.path.join(self._work_path, file) + return os.path.abspath(file) + + @property + def work_path(self) -> str: + """ + get work path + :return: work path + """ + return self._work_path + + @property + def root_path(self) -> str: + """ + get root path + :return: root path + """ + return self._root_path + + def set_config(self, key, value) -> None: + """ + set config value + :param key: config key + :param value: config value + """ + setattr(self, key, value) + + def get_config(self, key) -> str: + """ + get value of config + :param key: config key + :return: config value + """ + try: + return getattr(self, key) + except AttributeError: + return "" + + @property + def analysis_result_file(self) -> str: + """ + get filename of op result file + :return: filename + """ + return self._analysis_result_file + + @property + def tune_ops_file(self) -> str: + """ + get filename of tune op file + :return: filename + """ + return self._tune_ops_file + + @property + def operator_bound_ratio(self) -> float: + """ + operator_bound_ratio + """ + return float(self.config.get("THRESHOLD", "operator_bound_ratio")) + + def set_log_path(self, result_file: str, log_path: str = None): + log_path = log_path if log_path is not None else os.path.join(self._work_path, "log") + os.makedirs(log_path, exist_ok=True) + self.config._analysis_result_file = os.path.join(log_path, result_file) + self._analysis_result_file = os.path.join(log_path, result_file) diff --git a/profiler/advisor/dataset/__init__.py b/profiler/advisor/dataset/__init__.py new file mode 100644 index 0000000000..9fac2c8eb3 --- /dev/null +++ b/profiler/advisor/dataset/__init__.py @@ -0,0 +1,6 @@ +# import asight # noqa +# import asight.datasets.graph_dataset +# +# from .graph_dataset import GraphDataset as GraphD +# +# asight.datasets.graph_dataset.GraphDataset = GraphD diff --git a/profiler/advisor/display/__init__.py b/profiler/advisor/display/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/profiler/advisor/display/html/__init__.py b/profiler/advisor/display/html/__init__.py new 
file mode 100644 index 0000000000..e69de29bb2 diff --git a/profiler/advisor/display/html/render.py b/profiler/advisor/display/html/render.py new file mode 100644 index 0000000000..eb427ffc5a --- /dev/null +++ b/profiler/advisor/display/html/render.py @@ -0,0 +1,44 @@ +import os +import logging +from typing import List, Dict + +from jinja2 import Environment, FileSystemLoader +from profiler.advisor.common import constant + +from profiler.advisor.utils.utils import singleton, safe_write + +logger = logging.getLogger() + + +@singleton +class HTMLRender: + def __init__(self): + self.html = "" + self.render_list: Dict[str, List] = {} + + def render_html(self, template_dir: str = "templates", template_name: str = "main.html", + template_header=constant.DEFAULT_TEMPLATE_HEADER): + self.html = self.render_template("main", template_dir, template_name, render_list=self.render_list, + template_header=template_header) + + def render_template(self, key: str, template_dir: str, template_name: str, **kwargs): + if not os.path.isabs(template_dir): + template_dir = os.path.join(os.path.dirname(__file__), template_dir) + + env = Environment(loader=FileSystemLoader(template_dir), + autoescape=True) + template = env.get_template(template_name) + rendered_html = template.render(**kwargs) + if key not in self.render_list: + self.render_list[key] = [] + self.render_list[key].append(rendered_html) + return rendered_html + + def save_to_file(self, save_path: str): + if not save_path.endswith(".html"): + logger.error("Skip save html file because file name must endswith `.html`, " + "but got %s.", os.path.basename(save_path)) + return + + safe_write(self.html, save_path) + logger.info("Save suggestion to %s.", save_path) diff --git a/profiler/advisor/display/html/templates/affinity_api.html b/profiler/advisor/display/html/templates/affinity_api.html new file mode 100644 index 0000000000..f059fbf4c1 --- /dev/null +++ b/profiler/advisor/display/html/templates/affinity_api.html @@ -0,0 +1,50 @@ +{% if result|length > 0 %} +
+

Affinity API Issues

+
+ The analysis results of the following affinity APIs are based on runtime env + cann-{{ cann_version }} + and + torch-{{ torch_version }} + +
+ + {% if empty_stacks %} + Suggestion: + These APIs have no code stack. If the parameter 'with_stack=False' was set while profiling, please refer to + Ascend PyTorch Profiler to set + 'with_stack=True'. Otherwise, ignore the following affinity APIs, because backward broadcast operations lack a code stack. + {% endif %} + + {% for api_name, stacks in result.items() %} + + {% if empty_stacks %} +
{{api_name|safe}}
+ + {% else %} + +
{{api_name|safe}}
+
+ +
+ {% for stack in stacks %} +
No.{{loop.index|safe}} code stack, called {{stack[1]|safe}} times
+ + {% endfor %} +
+
+ {% endif %} + + {% endfor %} + +
+ +
+
+{% endif %} diff --git a/profiler/advisor/display/html/templates/main.html b/profiler/advisor/display/html/templates/main.html new file mode 100644 index 0000000000..1a9392d2b2 --- /dev/null +++ b/profiler/advisor/display/html/templates/main.html @@ -0,0 +1,202 @@ + + + + + + + +
+

Performance Optimization Suggestions

+{% for key, renders in render_list.items() %} + {% if key == 'operator'%} +
+

Profiling Operator Issues

+
+ {% for render in renders %} + {{render|safe}} + {% endfor %} +
+
+ {% else %} +
+

{{ key }}

+
+ {% for render in renders %} + {{render|safe}} + {% endfor %} +
+
+ {% endif %} +{% endfor %} + +
+ + + + + \ No newline at end of file diff --git a/profiler/advisor/display/html/templates/overall_analysis.html b/profiler/advisor/display/html/templates/overall_analysis.html new file mode 100644 index 0000000000..4c859a7bf9 --- /dev/null +++ b/profiler/advisor/display/html/templates/overall_analysis.html @@ -0,0 +1,15 @@ +

Model Profiling Time Distribution

+ + + {% for header in headers %} + + {% endfor %} + + {% for row in rows %} + + {% for element in row %} + + {% endfor %} + + {% endfor %} +
{{ header }}
{{ element }}
\ No newline at end of file
diff --git a/profiler/advisor/img/advisor_result.PNG b/profiler/advisor/img/advisor_result.PNG
deleted file mode 100644
index a9652f4ca53ff142a5ebd1033075aad54f8f0297..0000000000000000000000000000000000000000
GIT binary patch
literal 0
HcmV?d00001
[GIT binary patch data for the deleted image advisor_result.PNG (literal 53557) omitted]

diff --git a/profiler/advisor/img/jupyter_report.PNG b/profiler/advisor/img/jupyter_report.PNG
deleted file mode 100644
index baa860a7893e1801337916aea37475ea69bbaf04..0000000000000000000000000000000000000000
GIT binary patch
literal 0
HcmV?d00001
[GIT binary patch data for the deleted image jupyter_report.PNG (literal 34097) omitted]
zX^St@h!jwyHxMXF-1)I+`*3Tl^8@9f_6_)JYhcPFS*}Lydw*JKX5p)@Ox1eyR9Pex z@A27SAG#p8=^f9N_IqC_SkS$bv8;mGUm<89;Ye!8;&fHrr}o*WY?;RS9vu^}WGAIy zPv-Tt>$l5u^;_=fT*yBA!0QHDz}6p5ag8Wii2=M{y7)m5={1yaZY@n|{I-)=zt1nO zEYGiX1M*u!{q%{^#j9REI=Ax|?E~(0$6%My*xR)&*RR&?KDy;rui$9^r8{hXq<5%( zRKfad`LK!|#y1)CQ>}3b`lYLXE17jms~HN(8ZDPzdY|BqMMuf6_rMael|Q1EJHB|R zorkk-v@lWo$oV=$n<7+&o^wmd314n?SWOKH=U$=(4K2noqQ6Y4-zEc_wkI7F3(_c` zuFq5r5Rk=LK5PdB#_xm!;boWwAN_9o6P^JMW~6kqDGF}T__%w~*fx^ed{>WfyS8r7 zGhTHWWRuLugYX-rHQ@y}^0o_WGZOoRw3tGfL8g7$#hz9F8mGq^k^E;lr;r}i3GxKX zgR0guO>OJdBlk%6V|@Fc!p>-I1rEuBeL!r&c5>nAIq?1XdBM2xTUBo2Uq&)rFv=PI zz|l_oww4s>X!hRIWsFWlg-_6`EIBRN>~%Vw|*J*%C> z5x31atW1%+ah_^)?U%PNW%dEn>9V`!q&4ZiLB+_|*kOHuHUCX&M9)|X8wjcCz0G>R z*&EvKa)U6M0j$0 zaG^*^^M%lEU^0F(!ZBU=)zKwGb2oKCxtGIHkG}1`6B~f2K~0w* z8^Qo6!E%!tLr!Pr3U@=5`Z@Fgi=AWUDCDDt{5Q;2-K_vE<_rvdJotSDj1jC(NM_&X z?n42up0(#Nkd&h&>Z=;_fbLpor?YPm%JO1-lmchXk+TAfkLZwxb=2^-ENsYNrB<_^ z=e2gJzmaG1j|aR~jP63`fm!LtG8-q-Lpz^J>oZD%yhnjNsq*x-(r_Avesbj>PTm{A zcD_LI44+m~7WaD=*7fNA_^ygSuzN4+V7ETBplHnhS}SkubveKMI}uf*$35Y~ko~7< z0&bu0$-1deVMnKV=idsSLH20ZnZAv0F5U{vjwxULf!$3`4$fzPcEK2|{d+ozzDcd~ z+i>m(v~EIEC*BfJYMfgc@xhVX>h(E$7HjTp#8it!{7qO0at}$|MLU|MDL7oAKv62a z&M2zL7Ccy<&7_zJr<7y7LZADcc#Aec2lS&DWdD1wG*ud`zKimx%p;Od zL*1$^N5FAXwBUQKlTU2%*T4rQqfD^1@vO*v0XTWZQ~p7jvZE@oKEtT5EwcZ#cW= zw+7bT22HOps^`~-sOjU*l{x-j8HSdWAT%z&WoQ3V5M8rQHBNH+ zJUyr9(uCKq5gQjzR2B}dYkU%GYE`(bs-W-kZHDyaAtZs(HHX6!a+fwTzD2)YQS-0i z;xJn8=z0R{raS6(I5UCweK99EvP(%{JY4G zGv*C9DG;K*{nK6j_aT5hgd-Mc7Ja4Rbu^L}f^N?A-!0fqE$6S3aQ}Vzfq4AuE)BEt z`8VD(CDgQO3MolRzhr*T*R46aFkuj;!VcL}_2wWBZ>4N<~abMB!n_V#U0=N(Sn2CNJ3Ha>o#s0f zLE83vGq1$(BR7z9naBHpeZ)dL|op6B50#IsFkJ6YVH}vt*p&?)nu}Xw@ zUycZSiBxlh&Adwp%9$=ZT)sw$HSq@N(Y1~bTUOKFiq}`z8HeRtw?f4fLzl9kf{67Y zE-j%trF{zElG=4Z$%936Uomi6e*FL*7N{GzRoEEMTA$;5+yZVIojqNC^Y>^dAiCIa00Fw z86Pza!)a|fgp>S|N*P_(eo^Q|;I zp{{a5&>`G%2M6euRi+Pf64&BVLE3pFFT;H>p%V198J=A8v49^>LX)+3dXIhzg1qIZ zg0TFKW_X3;2w(O?#__U|WMcJJ-PB{ zl_b>s{MvbU$yk@`HP2--2lwslCOPMJDBtD8M;Ldeyt?V^(>;}9gG!+@3Z~9t+$G~x zQ&JE)Rp=PT)56is%_1;K_HumlYCy8oAIb5V;27o#Ws+pL*T}cS@;F+Z_~fI%1I>kC zH5zO*D|^pXkoFfCFClH_!)&fzlg*mS2m%p;%1^q0GkP<~$0V^oxs?rA7nRH+h@SigU4*kL|qM^;;y0OvA_KsLLKW114xCJBOB>vsl*QM$6fFp${6* zFn+uJ+yHtIdNN(*t|=v5L+o!O;CaCR#<4vkBqv;yXKbR|i8H;|+Buj+$Y8(ndtccq z(cWUepSNV~3iWMe89Av~Kj)^6lm#uPZ19GZxsBk89k^cAceN($RLQv)uI~whfU8*P zv^x~Bj*@#X4mHCI4IUQ2K784hZHYQ4aakO}=(fF8=G(bm-a5!vpc*k#u-$OhE`UVs zgZ9dPYGH{+t&A2N+PY-B*1M29e?QydOd&y~^Q0YbxUO+Vw=TQ`ux73;d|d`2x=32b z$5W6)Y!`aVUP5~%KwH$fuaByaPyR)BJr>W1(<6;zJUy(Yc%i;?7Dhd?U7~|bWku+j zZIHipP@soqz4AzlaxcR94M(d`LL42w)2OfmQ2A9_Nu_3Fpp~oRXH3Nh_1+$Omn6a9s zxk(6twFY4ga^)!lm-RLhXiCxVcH2vP)_Kj;Ggyr5XFfN0k@MPyw7&rHva4iw<|}3! 
z+3#vq{#ZA$TjgGotq^^JsX`vT3;a0B){KgsEioYP1GCPqWwR7{f0gDRYB1P2lDzW* z$lR85>?aM5@Zkj~)#|$7Q?*y0-~{?89Zpf}omkv;zoB*%zLmXzv#q+J%EI)P zqWRM{Prz9x0{$!9bMveMC-aRftWW4cFHC9|?H>Fe(VGDMe|{QY3xPrNzdY{e_0IgB z@1N}whXsyn5pPzKH-m%sKR@38;uroqBPJDbRjm=NcQa?mst-ek9#dF|OIu(at}V%9 zHA8)<^+4xHe9De{OyOs1_}P*7jW|&i5$1tL=_b?73;@hBEQG@|Z5cT6l4nmsR0Fz@ z=+5ijmms++uIz;f$TEk;f-YmJs@d&9kEc}?7_7;yyy2=l-{H!{EyhKW5MS-OI=g!Q zUiO4-ug{9w1OU@nC$No)@pi=?CAR3A=*K=b_FQ#En~@O2szU+{gS3lVNyMbgH&C)3zC50I(%nRaP*hsexTqiA5XWf?~c3?v0udN-k>$YDT&FWWM8Oouuk z^10d{?aWhVwTX_0uIlnf|2iEY11e?fo)gH|H}?;Sb@QLr?WM|sTfAcfe@Lp1Y>f(XD2yFK{d8oq2DnMIm z!DjWA$A)*OT{N*e7pv?s$DA8MxO3-|huy-h4SB=qSKkZ_*{&`F;T{B0j2!l-$`2-s z*dDdY(*zL3tbbMk$K>p5b^tqGNiUN20cG-DO+nc7`SphqMFt`rziF?WcIkC~kWCj| z{jTbc9gJEEfSFG%nBpRbSjE%pVl@Or*yBP`Bq%z0aEo}BObSHU@*xx6lvW79mS`n1?4CkNh#vib(tj}b-MP6G+-$bR2n*UHaLuo-HQqZHy3Zn z7uj)|ZDvG3__!-Vf%s~P$gT;d9Y>kP9O$5^qCKDW=&dtv_`+%#i-uOqnlE>t>x-cy z%~llwNjy!Hbr|m1+~f}O&SItJC?a$WmeXfyq*x@k5N z1F6WUh%x5c1mNX}=7>tg2=-Bl#a-&wuVS8Cw93TC(oI33mRkT{{N9d?S7<(Z6wQ}d zd|9f@-LILG_g*tIiu(Li9&GZ^#qH(|&$kqnsTWv_D_>cen==-#u>db(n4?ka7eW_$)`pVG@@0^m z^2ek#{1>@Pf32lu&k6HacN=o3A&Fn?w8-VRUD)?!N1Tj1NVSEVa-r5xU+U) zcd*Zc$TeKIOT3%5LIbiUl5I7~f zDmgsDif4v{+bZiD2KkU-t4SW+j{=0!o!|X@D1wE(J>$_@T6P5uonX+ zT-GBOsCerS5DB>^1ns&rB~zx*&$c|JJTh%HG}(~1YZoG}rZl1-)utgmETH{m#$-&p zt=M=Zf9z=o8I?Lwbe{m`SP6Gy*|t=|Ru+%87lQPzmmSRjWw5VUMh6UnR4;H;rlCa~ zXNxJyrX5Wyb}icH@f^pM;^j&x69i-*wbCOQJgt``qUvdqG)&~wPQEuUvOGj7CauWU|vcF)ege+je*#z1a!J!% zT3$hDtG5AC{;a=!GG{&Omn&qitdb*o;^#^DfnEhNiR$y1kg)ETgQOQGBS^6^qbMa9 z?&$mIGKr0ofcI)>7fI`rNtHOd(YxHjt_iZ0Q(7yAk(;bnuAz-gI zbprW+ok@XVO>+l^BeHt!Nr*o;t9IGSCs@Htj+_yCLcLYT>@zeje91-g&}IgV8IT`- zy4GOkx12^5;ZY>~>|Gls>AqbY9jIg(*86T~!1*1+%SI{qi1x>oT<$t1SUZm6qASScVVU?E2_qC1b~7H*2?@X&~z=m)OZ%__OQ zipv+77o`+hcs|n%phfOBxT~uHnb_dQe8ySpH(Wl+z1-uW$G4-8_&yiihcxKy6Qzq?k{)8L8Tj@G1VlA=m7T>(JHoa_q6`H^)eE`{PZT^F`4C zJZXW+_|N#63BlwJihk78*JPiC8l??!)}PQjR;`HVIo2_aSIMe5A2;vjmJgbK?L7r6 zuZzkOX~aNXl_^Ei3?IpYs3@N{p~C3)Z?=(RoZzEAQA_A7sP$G1tT+VYg+vMNTBqkB zUs(X|A#xB?PISeVh<5GFv9BNc#9w<)Id(`pb;+tA=`@Zxu_-^`25R?WqL=PGxRzm%D2G+W%_(rOzY zSJ2?f>z_Twy__Vb)%nxj{&VjJD)i?yKUvW<`*cB03y}0VfTX=biVs^X8Z1ms)zlcI z?r{HY^G=U(V=wGhIw32MXy4+8Um8f#kDAQA9rk3Vuq?G}{s#AY?iOFn7qv_iSDB_3 z>B8t6nuHUt;?Z5uP08fLPQR!fU^SQEp3qkS?<|iL>0*`;WdBQLpewi~cd?QEQ0kf@>FOgct#f(bqY*Jz zbMC|D1Cxg7-1Rtb2|#6{W?zt*_pncFAf@ zl_6Q>%aL6`iqnW)DhuXJmvU~wbzYd->m682EmE@Le;*{V`DB?~FY6mym!!-isHLws z3?Ez~%>33{N)N%sNXQ@_c=sd5gyY0LJg=QvJ_-Qv$p*S@Z)h&xd0HLB#xvkuP2 z=}3k<)GwM>rew3|zObMeK@trU>-Rl8!1c=9O(*}jYPV!4rgjB6Q~}>hCSnG|og7)x`~mJlF$}cgY~H|1MMD4n(o>s^kSemmj45(gJu^w)r9j7#1S50lQg#- zBBJ+UF2ft`%T&g*uq7Y0yyw(Y=B?0p%mENSwscAVn9$L|-D_0A(fTUS9PQ}LnOdANOU^3kaHz)*K$C!CQxlU6BqCsKvOn0wnN%mbTK>Ej|( zy34}NBA4oX+LvSCg$Q^q`3-5sSP}-nlY)F+iPPo$$+H4fC-IF?De(gH7Gc}2XGzs{ zH#8G-)|S+Ixf*p2J--Pp;^c;mx5|CP<3BWqj%LlpM+x?G-5fkJ)ECrj>LPVnz;23E zm03&6N)(C{@JVokgbC8U=L-gOjjPh^0o{X(Y-N;>B~IFg7Ot+EZ8NE*)|{tO+pT2P z?=CKtMlAZB2!2%h_JEu!Cg5pk2=c|&`K0V|n;W=|MWsOE6=@RkP9{6XNw7PaV!)DX zSS@dN&??nuFs7GX(?D~>-6kzYE)T@yp-k)g`Ino;bYPr}s3q!|A6RFPmTn zN=fnCR{%_qh+F{)zxHpNUy+CWx>lJQW!F!G7VgTUTCN9nw*YYg{DQxu^#21Gv`GX1 zg=QcyDF2Ogx)D;aX)yViT?w!uI0-koGB678^FI+G{&B=alP%@B7*1%0a7Lb6FuJ)nShh`A0Zr*Mh0Lre6DRDisJV&j&!?Xo?yuKQAOe zn->*r7&>>MFkg(*u5UBF0G#3Os1&|Eq2X?k?QX5rDc?DIdy_VZb5 zCi^qUEq90_fdMa*|Dc^%jF}QyVU6RYSt+(VswmJ^Ftx%BH%qF1?d;`badpFzRZ_K# z65#+$<0-?cGzTDPNQCatsaEv&)->NrFsE0MJy7u6}7zH2E~| zGRCjHev6Bk!KLLW97%j;w^?2W6P&vNwOLotz*1wG6;JR2Bx8ZPq_q6GW@x1$kC$bd zlbb;Y#bc}SdxmaH&hCgsbo?%ASV~D59OY9q|CpdZ=(j(jG z&$CdnCFHg1&PJi5e#f|bdxw3j{}Q*w^8PC4pCZLdl_;WWa6FWr 
zL{R(IzpK(}t2mXv|An(?30IXz$8)_+$nuh7ZZ0~%$#Q{3Df>riRk3+2i_$#hgC=Orr&H!unb3YDtW;4QbF^a zSbAXys;SH}dY^zXa)rJ+OqjuIcV|ooRPRBcfb_%*l zZf?rtHmF$9vTQtdV_^ka0=7Va-}K8bx6IJ3ezJ7%f51X44-$?Jupy-H@3PkVEK)gdO8h6Rl8gRKKXy- zr7GbFlWZs1BEF#reKOc4PcP+tas`VHDfFIk&*`zDDoeT>J#o_Y`3orK%+GjH@Gx1j zSx}p@IA!{aqQp)XuGd6hyrqQW;fyAk`j`kpTDsekTE-9!7d7g1qQayIl;>(O!f0cXLKGy9uSJemleb8@=NwUtr ziem^LZi-wAm=&IKdyqdUCFf&Zud>Mvf<*3D80IUT$F`t~=zz2hQD=gsB*fqX_)Tnhm>-8)THl^iGS+1@bzby3mCrG?%{weaYFNmq8 zv0D4dyO#_)4R3POC%ik33USdXdWWYT#Ej~$2b9{iX<)jRwUazS(wvxOaaaaJ$LG}7 z15Hc2t@PLz!Fs;;Q6X8eL^r?qDALtrE-jOX?lHN}H%B5@YD@gxIpmM0<*xmEfJ$OS zAYw@ORyjfkaI$*8C`s$EA_=1!eOqUPhsqI3B8E~4(=QT{kv$EV(j%vrmBjWIpwMqHJ zCk^CyVSg(3(vcXqK+bCp!>F^lv@D`r(DW6Rd=m6Zh!>PIv(ztn>rbqt($ho@F#8_9 zDfEykB+5PK@K|j2B22>%te!%JC{Nd-egyJQp>Hx7(m~D^ z@(9F^&iv__KC z|FyT_A7RCRY@Ud6-~ZKNc<^9>##MU|h$L@-L4xZ;iQwY@+ir}nBks)tFK^OhMgl|T zz^!SX1mVg`bJ)&pegD7j)mTqa4|NIXsIVN;tbbHiE!Q~=e&O3RTaHkJp12!>*w>W^ z(37>_^VK`Ddhh8C56<+X%-LSS7|knsf(=<6IT>njylB*d$EAN3JFWkKCGk^x+WTV_L?nLRfM`*n1*r{og71E}9j884v0F z%+$e`>>&+ah#mAgXz6jyuW=ia2a`?oYk4trS28wY4XaMqY4&= zI>ACz@o7H_F~=6W|HI#*_R?(Guh zxo407P|Ry)&$I^Efl&DAzehtdBqvCC)^dK}(@7$>x>S23egL!o@~HUs0^X-bgdn>H z%;qEUm%LRBC=8)S^-uYAZdeK@|Ggsji_shF;bSXiu4#g=NN8xBKSPc;d_Ao+7J+E> zPu^s(vI9hTCM5fg_bhX1ojTX4!37lgAyg-PtN%iY8-2QUY(YTLeouPXyYrHO)>&C4)M5$GdEP7*SLfV)@{`A?WB5OS}65M5E_t_|2F z>_5fSveM&B7pD7w7OKbpTAPE}zeon*ptn_~Y@x3E=%Y;Hyeif(=AePCxW2CZwoJ(}NM#DOGR1CdyA|H8qzET7LYu zC@B}g5_;{*C9<{2LqoQ9(2StN;^<8kFRZlw52Hdk=>SF{gi`*8yK< z^LjGvjTh&&ZBZLW?;0z5m;KRbOU`tL+Jj9=7;rWzyyAm6SsOs3#Sv$A!5Y1%P`W1x zv`57o4Yu2-6?~sE=ulRCz?aIvCrEnvFGkXUQr8h%SOkz;NFzHx1+_ zGdYeYxIohqk>uPr;LTDhW7fMXseM`pM1buSup>KfosB-{T%#cJqtu zkRy+_qvH6tjxn)5Eag$9OQ2m}srIZ-AaL%ac0ZemF|u@5m|b*D0&ff3SBsL5SYB0% z=lyjgK&W2jh21`0;TDiBtrvqZmszSk>=ih^&Ikby@{3aEkhIreUcK}VT|Wi%1CG#f zvpVk8d0}&k=0PCT{X2XT?f`Fic6timttk7PiB$!5?L-$-ReNOUw(oh9A|$Ru!qz~& zxxD~LYD1x*-K^YtDs8dusCs46jAu*!D|G&#`s{E($HcjUmaTUCMq?a3Ij;r7n>ob( zL`lfYwJV*Ze)ghmy7rWz~pu?tF9Ig8*#YUC+YL^>V&B|q?7S~X{T%ew*Rc4OL zI`LOz(Sw|*V(fUbKwMeGD05NAv-D#*lrlJ0wNk(SDI<(PV46^V&9b8&oJusWS7=AV^kcGwTJmkH6=FdO3s!pv1ueM?u}a4OZ%~tOA(a z70M$A(WWbvJqPmdl4dJZ%7>p8+RKB(2MRy6iAa-ypO32k8o+M|0>saJ6!Xp*|0{78 zUMGe7yl*lGE8@1vkwV7>||?F0>%{e_*~{0(ren*Gqx$h9D51Z9^=M{U8xqRLU?QB53Z z!z;`hP%5fDcYzR%`UhK57daMCfp~{E91q!S=uAUevhn%Y2p-`0)Z7 z6CKWUwF9bMhUuNlwO5ShWQjS#Wm)4kYP(9XZ-l-F<4X0sIqRSJI{|jCydZ2}gN&wP z7c_3}&^zi1$c{f8w6a;?GS@Q9GSP-dkwx)<@+>)bI*Y?&wWlI*7F$K5PeIA;fHp9A&Kn8FK8?_~%bj@tX ztiY-_Z@7M2xzph{*Ez5GJrD;TtHoJ5T3$_ME4wE}A|Bli-K;DA^a1;=?ySuXYK&U% z^zkL3Q!BR`5;pb>-97(!Vx;HU?Xdix!Nsa3b0S;xhDub)0z6H$pwGXnNyitrhd(Fs zT=+#z4;!3~SpJ@KJ{E%Gju=nJ=brD=QSJF5`&UXucI{(;Z&Wtlm+|yLRPV zL2yoQK?IkUa^9JyyUJe|rAH*E3({c?mbPmytB6FAHoz}(4L7^f3|mke`P0F4Oit3x z9*TTbRe~ZVsom>)Anl<-I zW3>hY+NzM(L6&}v*Ej0J=%V4RQrSW1CLf{*vvvAgaB2b!`ce_-a;11#ghX|Ep+Fu$1-lw`6uI z4!k-TG-EMc13V}=Lx0~to5TJWl>&beUcM?=tWN|$*$+(z)%Xsr7j7n*kP8@4cs=ZM z0_yPWyTXmJ52(uh$V&F)TqL47z75b*V7qU6J>0-vPhn+>!T%!bA--z{)Fs@v*`)m8 zR8%d+^V?~@tgui(|7tXKi_ZP?Vkmiy=)flL9jrqQ z%23#@;=NH6RatanW}e9PDR{lYj`LKsQp};~C(F#K2cpKCjPgHvPY0i2xwK|xId@14 zkp*{(DhbGZ905l9M@d9?>QqppR>^Bni56}cybYg|v zW@PZ|)_H^yL+GYS@~i+Vm_(o$=1a7$$ycjM`q^)6VW5Onlq?floI|pI?$+J+_6FxQ z*X1X0=mO@pyOHBGqBv*^X|m#3GVHw8Y_i(RRi!&MU%>H$(DyV0u0|mc?ch!ForN3p zWJ-G7rz&_YZW}8iB$Y(^P>mwY7Uu&6efE1FjZ=mgU?{MZ$}}jh4#A|9P;QjW{uaSJ z*9Vz7kh=;x_nJ`ALCtkS%HCgt(Zjw0F0Jy5l~Ru#ajKu8o6hD$CvGQo!5-DkA(dqX@jQ#4j8`Zb(~>$ms~YgCbhOE1 zxKw+-#MdHuc^!~DDn!AkK}jI`14rJ?Xgy1RCZm4Dc=xZo@eZ2y^O{z9Jq`=Pd`U_Q zzmMpXU|S&`$_6cs%=K<%2iWj0MftYpENv5lCOR1mg-^tGuCpnF$g@v~X{vd_#_9cI 
ztL52@DVoXF4KH#t|3>GaD0o6Zu->Y@8#O+;!R%yH+HZ&CO6juj1aN3#63N1dg8u!W z%5dz`Nn|IzjgNkQ|MAg+QSoD?dbh#rBT)nAgw(fx4Kg~s{B5L&6~CbnEvd&W?)B81 zC9qUu+8cqPQjNefvTPfsk9jqitdHwf?V2e7*PXua25Jpy8KJL1n6XM4DCr}l7IC!p z`l&Hbxq}pZ!^nYbd7{!#J&f%@)pSc;P;Sy3{I(pEpc&|CfA%^X&@P>UdYsqOvTlMm zUFgJKAAoMIr)jn8^t|KJ8gt*<=hy9NTqFcKdzkB5V!Kt!>Q zwgEbi?3g1u>V_uyb|u(rWk7^|Dw&ZG`?7WhrcA^&w%%m_gHI!O2X{?4$beU z_rOyp5C;G)I{X2n8n1(BB9JN-ta4GS9C zQGe?G7WA~7)9())`vktjM^{DfKbje_=*pSizY$C>7`Kt#lV2K(KKLruy&K$M`B-tJ z9q5e$7E8kZ=Kup)?Y0uA(-2yo8GWL1a8MrneE;o$x3tZI10##b#V^6!Ay7r#DayFm)%S5if zL&IpnMts>h?)eKp6OLuDx(%Wxfj5?Ass^I8e~^8)?)qAGhF<6V$;t3DNuQNO7QOV1QG-unCDcjU%U!Tq4rBFytGEKb z`Tx7Jw`sU_!XS7%dX5BqAybn#Lw|HfFi>Y)nXlVjM*J2g3rc@Gm15SgN&Lc}{=e*? eUblT{W0&d5Yu16Ir2_E9E@特别说明:通信(Uncovered Communication Time)和空闲(Free Time)耗时会受profiling性能膨胀的影响,以L0 + NPU采集的profiling为准。" - ] - }, - { - "cell_type": "code", - "execution_count": 19, - "id": "36b7a24cc7ca5da2", - "metadata": { - "ExecuteTime": { - "end_time": "2023-11-21T12:53:38.379699800Z", - "start_time": "2023-11-21T12:53:38.363755900Z" - }, - "jupyter": { - "outputs_hidden": false - } - }, - "outputs": [], - "source": [ - "# 数据准备 EDIT THE PROFILING DATA PATH\n", - "profiling_path = \"YOUR PATH\"\n", - "# 若您有GPU上采集到的性能数据,可将NPU的性能数据与GPU之间进行对比,分析性能差距。输入GPU的性能数据路径\n", - "gpu_profiling_path = \"\" #默认为空,若有则可填写\n", - "interface = Interface(profiling_path)" - ] - }, - { - "cell_type": "markdown", - "id": "cf832ac2e0dfa30f", - "metadata": { - "jupyter": { - "outputs_hidden": false - } - }, - "source": [ - "## 1) 性能拆解分析" - ] - }, - { - "cell_type": "code", - "execution_count": 20, - "id": "40aac93278dd6e34", - "metadata": { - "ExecuteTime": { - "end_time": "2023-11-21T12:53:41.815599700Z", - "start_time": "2023-11-21T12:53:41.783393700Z" - }, - "jupyter": { - "outputs_hidden": false - }, - "scrolled": false - }, - "outputs": [], - "source": [ - "print(\"Start performance analysis, please wait...\")\n", - "dataset = interface.get_data('overall', 'summary', base_collection_path=gpu_profiling_path)\n", - "data = dataset.get('data', {}) or {}\n", - "bottleneck = dataset.get('bottleneck', {}) or {}\n", - "print(\"Performance analysis is complete, you can edit the data to show what you want.\")" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "3f353506", - "metadata": {}, - "outputs": [], - "source": [ - "# 等待性能分析完成后再查看数据" - ] - }, - { - "cell_type": "code", - "execution_count": 1, - "id": "cd3fceda-49f0-439f-9c54-cc31490fc99e", - "metadata": {}, - "outputs": [ - { - "data": { - "image/png": 
"iVBORw0KGgoAAAANSUhEUgAAAiIAAAH2CAYAAABN8+eOAAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjcuMCwgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy88F64QAAAACXBIWXMAAA9hAAAPYQGoP6dpAABsjklEQVR4nO3dd3hTZcMG8DujTZqme6V0Ugod0LK3LAFZKoggr2wHoIKAivI6UFTgE9RXHAwXQ0QEURFBQJkCsvcoFAoFCi0tdO82Od8fpZHQAi00fTLu33X1gqQn59xN193nPOeJTJIkCUREREQCyEUHICIiIvvFIkJERETCsIgQERGRMCwiREREJAyLCBEREQnDIkJERETCsIgQERGRMCwiREREJAyLCBEREQnDIkJWY9GiRZDJZJDJZNi6dWuF90uShPDwcMhkMnTu3LlGjy2TyTB16tRqPy4xMREymQyLFi2q0nblb3K5HF5eXujduzd27dp1b6Hv4K233kJwcDCUSiXc3d0BAJ07d67wvN36cW/duvW2z785hYaGmjw/t3tbtGgRpk6dCplMVqv57ubmjAqFAh4eHmjcuDHGjBmD3bt3V9i+ql83t/rhhx8we/bsaj2msmOVP4fXrl2r1r7u5OTJk5g6dSoSExMrvG/kyJEIDQ2tsWORdVGKDkBUXS4uLvj2228r/NLctm0bEhIS4OLiIiZYDXjxxRcxePBg6PV6nDhxAu+++y66dOmCXbt2oWnTpjVyjN9++w3Tp0/Hm2++iV69ekGlUgEA5s6de9fHNmvWDLt27UJ0dHSNZKmqX3/9FUVFRcbb33zzDb799lusX78ebm5uxvvr1auHoqIi9OzZs1bzVcWAAQPwyiuvQJIkZGdn4/jx4/juu+/w1VdfYfz48fj000+N2/r7+2PXrl2oV69etY7xww8/4Pjx45g4cWKVH3Ovx6qukydP4t1330Xnzp0rlI4pU6ZgwoQJZj0+WS4WEbI6gwYNwtKlSzFnzhy4uroa7//222/Rtm1bZGdnC0x3f4KDg9GmTRsAQPv27REeHo6uXbti7ty5+Prrryt9TEFBAdRqdZVHAY4fPw4AGD9+PHx9fY33V6VcuLq6GvPVpltL2Pr16wEAzZs3h7e3d4XtAwMDayVXdfj5+Zk8dz169MDEiRMxevRofPbZZ4iMjMTzzz8PAFCpVGZ/nvV6PUpLS2vlWHdj7hJElo2nZsjqPPnkkwCAZcuWGe/LysrCzz//jKeffrrSx6Snp+OFF15AQEAAHB0dERYWhjfffNPkr2wAyM7OxqhRo+Dl5QWtVouePXsiPj6+0n2eOXMGgwcPhq+vL1QqFaKiojBnzpwa+ijLlP+CuHDhAoB/T0/9+eefePrpp+Hj4wONRoOioiIYDAbMmjULkZGRUKlU8PX1xfDhw5GUlGTcX2hoKN566y0AZb8Ybz71UtmpmVtVdmpm5MiR0Gq1OHv2LHr37g2tVougoCC88sorFZ7fpKQkDBgwAC4uLnB3d8eQIUOwb9++ezoNcTuVnZoJDQ3Fww8/jDVr1qBp06ZwcnJCVFQU1qxZA6DseY2KioKzszNatWqF/fv3V9jv/v378eijj8LT0xNqtRpNmzbFihUr7iurQqHAF198AW9vb3z44YfG+ys7XZKWlobRo0cjKCgIKpUKPj4+aN++PTZu3Aig7PO3du1aXLhwweRU0M37mzVrFqZNm4a6detCpVJhy5YtdzwNdOnSJfTv3x+urq5wc3PD0KFDkZaWZrLN7U5bhoaGYuTIkQDKnt+BAwcCALp06WJyKg2o/NRMYWEhXn/9ddStWxeOjo4ICAjA2LFjkZmZWeE4Dz/8MNavX49mzZrByckJkZGRWLBgwV2efbIULCJkdVxdXTFgwACTHzTLli2DXC7HoEGDKmxfWFiILl264LvvvsPLL7+MtWvXYujQoZg1axb69+9v3E6SJPTr1w9LlizBK6+8gl9//RVt2rRBr169Kuzz5MmTaNmyJY4fP46PP/4Ya9asQZ8+fTB+/Hi8++67Nfaxnj17FgDg4+Njcv/TTz8NBwcHLFmyBCtXroSDgwOef/55TJ48Gd27d8fq1avx/vvvY/369WjXrp3xXP+vv/6KZ555BkDZqMKuXbvw7LPP3nfOkpISPProo+jatSt+++03PP300/jkk08wc+ZM4zZ5eXno0qULtmzZgpkzZ2LFihXw8/Or9HNmDkeOHMHrr7+OyZMn45dffoGbmxv69++Pd955B9988w1mzJiBpUuXIisrCw8//DAKCgqMj92yZQvat2+PzMxMzJ8/H7/99huaNGmCQYMG3XeBcnJyQrdu3XD+/HmT0nirYcOGYdWqVXj77bfx559/4ptvvkG3bt1w/fp1AGWn1tq3bw+dToddu3YZ32722WefYfPmzfjoo4+wbt06REZG3jHbY489hvDwcKxcuRJTp07FqlWr0KNHD5SUlFTrY+zTpw9mzJgBAJgzZ44xW58+fSrdvvx78aOPPsKwYcOwdu1avPzyy1i8eDEefPDBCgX3yJEjeOWVV/DSSy/ht99+Q2xsLJ555hn8/fff1cpJgkhEVmLhwoUSAGnfvn3Sli1bJADS8ePHJUmSpJYtW0ojR46UJEmSGjZsKHXq1Mn4uPnz50sApBUrVpjsb+bMmRIA6c8//5QkSZLWrVsnAZA+/fRTk+2mT58uAZDeeecd4309evSQAgMDpaysLJNtx40bJ6nVaik9PV2SJEk6f/68BEBauHDhHT+28u1mzpwplZSUSIWFhdKBAwekli1bSgCktWvXmjwHw4cPN3l8XFycBEB64YUXTO7fs2ePBEB64403jPe98847EgApLS3NZNtOnTqZPG+SJFX4uMuf9y1bthjvGzFiRKXPb+/evaWIiAjj7Tlz5kgApHXr1plsN2bMmCo9Rze73cdw8/tuFhISIjk5OUlJSUnG+w4fPiwBkPz9/aW8vDzj/atWrZIASKtXrzbeFxkZKTVt2lQqKSkx2e/DDz8s+fv7S3q9/o55AUhjx4697fsnT54sAZD27NkjSVLlXzdarVaaOHHiHY/Tp08fKSQkpML95furV6+eVFxcXOn7bj5W+XP40ksvmWy7dOlSCYD0/fffm3xsN3+NlAsJCZFGjBhhvP3TTz9V+NopN2LECJPc69evlwBIs2bNMtlu+fLlEgDpq6++MjmOWq2WLly4YLyvoKBA8vT0lMaMGVPhWGR5OCJCVqlTp06oV68eFixYgGPHjmHfvn23PS2zefNmODs7Y8CAASb3lw8bb9q0CUDZX70AMGTIEJPtBg8ebHK7sLAQmzZtwmOPPQaNRoPS0lLjW+/evVFYWFjplRBVMXnyZDg4OECtVqN58+a4ePEivvzyS/Tu3dtku8cff9zkdnn28o+pXKtWrRAVFWX8GM1FJpPhkUceMbkvNjbWeEoJKJtM7OLiUmEiafmpNnNr0qQJAgICjLejoqIAlJ3S0Gg0Fe4vz3727FmcOnXK+HVx6+c7OTkZp0+fvq9skiTddZtWrVph0aJFmDZtGnbv3l
3tUQkAePTRR+Hg4FDl7W/9XnjiiSegVCqNX2/msnnzZgAVv54HDhwIZ2fnCl/PTZo0QXBwsPG2Wq1GgwYNTL7+yHKxiJBVkslkeOqpp/D9999j/vz5aNCgATp06FDpttevX4dOp6swb8DX1xdKpdI4tH39+nUolUp4eXmZbKfT6Srsr7S0FJ9//jkcHBxM3soLw71e9jhhwgTs27cPBw4cQEJCApKTkzF69OgK2/n7+1fIVNn9AFCnTh3j+81Fo9FArVab3KdSqVBYWGiS0c/Pr8JjK7vPHDw9PU1uOzo63vH+8uxXr14FAEyaNKnC5/uFF14AcO+f73LlvzDr1Klz222WL1+OESNG4JtvvkHbtm3h6emJ4cOHIyUlpcrHqezr405u/dov//4w99dT+ffirackZTIZdDpdhePf+j0LlH393Xx6jSwXr5ohqzVy5Ei8/fbbmD9/PqZPn37b7by8vLBnzx5IkmRSRlJTU1FaWmq86sLLywulpaW4fv26yQ+2W3/Qe3h4QKFQYNiwYRg7dmylx6xbt+49fUyBgYFo0aLFXbe7tVSV501OTq5wxciVK1cqvbKktnl5eWHv3r0V7q/OL1IRyp+7119/3WRO0c0iIiLuef8FBQXYuHEj6tWrd8erfby9vTF79mzMnj0bFy9exOrVq/Hf//4XqampxquI7qa666ukpKSYjCJV9v2hUqkqzNkAcF9lpfx7MS0tzaSMSJKElJQUtGzZ8p73TZaHIyJktQICAvDqq6/ikUcewYgRI267XdeuXZGbm4tVq1aZ3P/dd98Z3w+UzeYHgKVLl5ps98MPP5jc1mg06NKlCw4dOoTY2Fi0aNGiwltlf6GZ04MPPggA+P77703u37dvH+Li4owfo0idOnVCTk4O1q1bZ3L/jz/+KChR1URERKB+/fo4cuRIpZ/rFi1a3PPaNXq9HuPGjcP169cxefLkKj8uODgY48aNQ/fu3XHw4EHj/TU9CnDr98KKFStQWlpqcnVVaGgojh49arLd5s2bkZuba3Jf+Xo1VclX/vV669fzzz//jLy8PIv4eqaawxERsmoffPDBXbcZPnw45syZgxEjRiAxMRExMTHYsWMHZsyYgd69e6Nbt24AgIceeggdO3bEa6+9hry8PLRo0QI7d+7EkiVLKuzz008/xQMPPIAOHTrg+eefR2hoKHJycnD27Fn8/vvvxnPctSUiIgKjR4/G559/Drlcjl69eiExMRFTpkxBUFAQXnrppVrNU5kRI0bgk08+wdChQzFt2jSEh4dj3bp12LBhAwBALrfcv4u+/PJL9OrVCz169MDIkSMREBCA9PR0xMXF4eDBg/jpp5/uuo+rV69i9+7dkCQJOTk5xgXNjhw5gpdeegmjRo267WOzsrLQpUsXDB48GJGRkXBxccG+ffuwfv16k1GamJgY/PLLL5g3bx6aN28OuVxepRG22/nll1+gVCrRvXt3nDhxAlOmTEHjxo3xxBNPGLcZNmwYpkyZgrfffhudOnXCyZMn8cUXX5gsNAcAjRo1AgB89dVXcHFxgVqtRt26dSst7d27d0ePHj0wefJkZGdno3379jh69CjeeecdNG3aFMOGDbvnj4ksD4sI2Ty1Wo0tW7bgzTffxIcffoi0tDQEBARg0qRJeOedd4zbyeVyrF69Gi+//DJmzZqF4uJitG/fHn/88UeFyxyjo6Nx8OBBvP/++3jrrbeQmpoKd3d31K9fv8LE0toyb9481KtXD99++y3mzJkDNzc39OzZE//3f/9X6yM0lXF2dsbmzZsxceJEvPbaa5DJZHjooYcwd+5c9O7d27jUvCXq0qUL9u7di+nTp2PixInIyMiAl5cXoqOjTX4p38nKlSuxcuVKyOVyaLVahISEoG3btpg/f/5dFxRTq9Vo3bo1lixZgsTERJSUlCA4OBiTJ0/Ga6+9ZtxuwoQJOHHiBN544w1kZWVBkqQqTYS9nV9++QVTp07FvHnzjBOSZ8+ebZxHAwCvvvoqsrOzsWjRInz00Udo1aoVVqxYgb59+5rsq27dupg9ezY+/fRTdO7cGXq9HgsXLqwwIRUoO4W0atUqTJ06FQsXLsT06dPh7e2NYcOGYcaMGcbRFbINMul+vkqJiO7TjBkz8NZbb+HixYsWuSIqEZkXR0SIqNZ88cUXAIDIyEiUlJRg8+bN+OyzzzB06FCWECI7xSJCRLVGo9Hgk08+QWJiIoqKioynF8qXnSci+8NTM0RERCSM5U5TJyIiIpvHIkJERETCsIgQERGRMCwiREREJAyLCBEREQnDIkJERETCsIgQERGRMCwiREREJAyLCBEREQnDIkJERETCsIgQERGRMCwiREREJAyLCBEREQnDIkJERETCsIgQERGRMCwiREREJAyLCBEREQnDIkJERETCsIgQERGRMCwiREREJAyLCBEREQnDIkJERETCsIgQERGRMCwiREREJAyLCBEREQnDIkJERETCsIgQERGRMCwiREREJAyLCBEREQnDIkJERETCsIgQERGRMCwiREREJAyLCBEREQnDIkJERETCsIgQERGRMCwiREREJAyLCBEREQnDIkJERETCsIgQERGRMCwiREREJAyLCBEREQnDIkJERETCsIgQERGRMCwiREREJAyLCBEREQnDIkJERETCsIgQERGRMCwiREREJIxSdAAioltlF5YgI68YGfklyCsqRbHegFK9hFK9ASUGCSWlBuglybi97KbHOijkcFYp4aJWQnvjXxe1A1zUSjgo+LcXkaVhESEis9MbJFzOKEDi9TykZBciM7+sZJSVjWJk5JWU/ZtfjMz8EpQapLvv9B6olHJjMSkvKW5ODvB3c0KghxMCPMr+DXTXwE3jYJYMRGRKJkmSeb7jiciu3Fw2Eq/nIfFa/o1/85CUUYBivUF0xGpxUSkR4OGEAPebS4oGwZ4ahPtqoXZQiI5IZBNYRIio2i5nFuDopUwcScpC/NUcqy0b90ohl6GejzMa1nFDwzquiPZ3RcM6bhxFIboHLCJ0T0JDQzFx4kRMnDhRaI7OnTujSZMmmD17ttActiwrvwRHkjJx5FJm2b9JWUjLKRIdyyIFuDshuo4rGtZxNZaUOu5OomMRWTQWETNLSUnB9OnTsXbtWly+fBm+vr5o0qQJJk6ciK5du4qOd1eLFi3CxIkTkZmZaXJ/WloanJ2dodFozHLcrVu3okuXLnfcZuHChXj00Ufh4OAAFxcXs+SwN8WlBhy7nPVv6biUiQvp+eBPiXvnrXVE67peaFvPC+3DvVHX21l0JCKLwsmqZpSYmIj27dvD3d0ds2bNQmxsLEpKSrBhwwaMHTsWp06dEh3xnvn4+Jh1/+3atUNycrLx9oQJE5CdnY2FCxca73Nzc4OTE//avF8Xr+dja3wq/o5Pw66E68gr1ouOZFOu5RZj7bFkrD1W9vVcx02NtvW80e5GMdG5qQUnJBKL17KZ0QsvvACZTIa9e/diwIABaNCgARo2bIiXX34Zu3fvNm538eJF9O3bF1qtFq6urnjiiSdw9epV4/unTp2KJ
k2aYMGCBQgODoZWq8Xzzz8PvV6PWbNmQafTwdfXF9OnTzc5vkwmw7x589CrVy84OTmhbt26+Omnn4zv37p1K2Qymclox+HDhyGTyZCYmIitW7fiqaeeQlZWFmQyGWQyGaZOnQqg7NTMzadDZDIZvvnmGzz22GPQaDSoX78+Vq9ebZJn9erVqF+/PpycnNClSxcsXry4wvHLOTo6QqfTGd+cnJygUqkq3Ne5c2eT00OhoaGYNm0ahg8fDq1Wi5CQEPz2229IS0szPscxMTHYv3+/yfH++ecfdOzYEU5OTggKCsL48eORl5d3t0+xVcovLsWmuKt4+7fj6PzhFnT8cAve/u0ENsalsoTUgitZhfj5YBJe+ekI2vzfJjz40Va8+esx/HEsGRl5xaLjEdU6FhEzSU9Px/r16zF27Fg4O1ccinV3dwcASJKEfv36IT09Hdu2bcNff/2FhIQEDBo0yGT7hIQErFu3DuvXr8eyZcuwYMEC9OnTB0lJSdi2bRtmzpyJt956y6TgAMCUKVPw+OOP48iRIxg6dCiefPJJxMXFVeljaNeuHWbPng1XV1ckJycjOTkZkyZNuu327777Lp544gkcPXoUvXv3xpAhQ5Ceng6gbHRowIAB6NevHw4fPowxY8bgzTffrFKO6vrkk0/Qvn17HDp0CH369MGwYcMwfPhwDB06FAcPHkR4eDiGDx+O8rOSx44dQ48ePdC/f38cPXoUy5cvx44dOzBu3Diz5BPhVEo2vtyWgCHf7EaT9/7CM4v347tdF5B4PV90NLt37loelu65iBeWHkSzaX+h75ydmL8tAZfS+bkh+8BTM2Zy9uxZSJKEyMjIO263ceNGHD16FOfPn0dQUBAAYMmSJWjYsCH27duHli1bAgAMBgMWLFgAFxcXREdHo0uXLjh9+jT++OMPyOVyREREYObMmdi6dSvatGlj3P/AgQPx7LPPAgDef/99/PXXX/j8888xd+7cu34Mjo6OcHNzg0wmg06nu+v2I0eOxJNPPgkAmDFjBj7//HPs3bsXPXv2xPz58xEREYEPP/wQABAREYHjx49XGMWpCb1798aYMWMAAG+//TbmzZuHli1bYuDAgQCAyZMno23btrh69Sp0Oh0+/PBDDB482DiyUr9+fXz22Wfo1KkT5s2bB7XaOofOj1zKxG+Hr2Dd8WQkZxWKjkNVIElln7cjlzLxwbpTiAlwQ68YHfrE+CPEi3NLyDaxiJhJ+V/bMpnsjtvFxcUhKCjIWEIAIDo6Gu7u7oiLizMWkdDQUJMJmX5+flAoFJDL5Sb3paammuy/bdu2FW4fPnz4nj6mu4mNjTX+39nZGS4uLsY8p0+fNn4s5Vq1amX2HH5+fgCAmJiYCvelpqZCp9PhwIEDOHv2LJYuXWrcRpIkGAwGnD9/HlFRUWbJaQ7nr+Vh1aHL+P3IFZy7ZpunluzJsctZOHY5C7PWn0bDOq7oHeOP3jH+nPBKNoVFxEzq168PmUyGuLg49OvX77bbSZJUaVm59X4HB9P1CWQyWaX3GQx3X8ehfL/lJebmC6dKSkru+vjbuVOeyj5Oc12wdXOO8mNWdl95NoPBgDFjxmD8+PEV9hUcHGyWjDUpNbsQq49cwW+Hr+DY5SzRcchMTlzJxokr2fhww2lE+buidyMd+sT6I8xHKzoa0X1hETETT09P9OjRA3PmzMH48eMrzBPJzMyEu7s7oqOjcfHiRVy6dMk4KnLy5ElkZWXVyF/iu3fvxvDhw01uN23aFMC/V74kJyfDw8MDACqMljg6OkKvv/8JjJGRkfjjjz9M7rt1wqgozZo1w4kTJxAeHi46SpVlFZRg/fFk/Hb4Cnafuw4zrYhOFiouORtxydn4+K94tAjxwJA2wegd4w+Vkqu9kvXhZFUzmjt3LvR6PVq1aoWff/4ZZ86cQVxcHD777DPjKZNu3bohNjYWQ4YMwcGDB7F3714MHz4cnTp1QosWLe47w08//YQFCxYgPj4e77zzDvbu3WuchBkeHo6goCBMnToV8fHxWLt2LT7++GOTx4eGhiI3NxebNm3CtWvXkJ9/bxPoxowZg1OnTmHy5MmIj4/HihUrsGjRIgB3P31lbpMnT8auXbswduxYHD58GGfOnMHq1avx4osvCs1VmQMX0vHiskNoOX0jJv98DP8ksITYu/0XMvDS8iNoM2MTpq89ifM8JUdWhkXEjOrWrYuDBw+iS5cueOWVV9CoUSN0794dmzZtwrx58wCU/RJetWoVPDw80LFjR3Tr1g1hYWFYvnx5jWR499138eOPPyI2NhaLFy/G0qVLER0dDaDsdMWyZctw6tQpNG7cGDNnzsS0adNMHt+uXTs899xzGDRoEHx8fDBr1qx7ylG3bl2sXLkSv/zyC2JjYzFv3jzjVTMqler+Psj7FBsbi23btuHMmTPo0KEDmjZtiilTpsDf319ornJFpXqsPJCERz7fgcfn7cLvR66guNQ+llKnqsvIL8HX28/jwY+3YvDXu7H2aDJK7GTJfbJuXFnVhslkMvz66693nKMi0vTp0zF//nxcunRJdBSLdDW7EEt2XcCyvRdxnetL0D3w1qrwRItAPNkqGEGe5lkFmeh+cY4I1Zq5c+eiZcuW8PLyws6dO/Hhhx/a1FodNWV/YjoW/pOIDcdTUMrzLnQfruUWYe7WBMzfloCODXwwqkMY2od7i45FZIJFhGrNmTNnMG3aNKSnpyM4OBivvPIKXn/9ddGxLEJRqR6rD1/B4l2JOH45W3QcsjEGCdh6Og1bT6ehabA7xnUJR9coP9GxiADw1AyRUEWleizbcxHztiXgajZf0ZZqT7S/K8Y9GI5ejXTCJ4yTfWMRIRKguNSA5fsvYe6Ws1z1lISq76vFC13q4dHGAVDIWUio9rGIENWiUr0BKw8k4fPNZ3E5s0B0HCKjEC8NnutUD483C4SjkhdUUu1hESGqBXqDhF8OlhWQi3wxM7JgddzUGNOpHv7TKogLpFGtYBEhMiODQcJvRy7js01nudAUWZVADye82iMCjzauwzkkZFYsIkRmsuFECj7ccBpnU3NFRyG6Z40D3fBG7yi0DvMSHYVsFIsIUQ07nZKD99acwM6z10VHIaox3aP98HqvSL7IHtU4FhGiGpKZX4z//RWPpXsuQs+FyMgGOShkeKp9XYzvWh9aFZehoprBIkJ0nwwGCUv3XMDHf8UjM79EdBwis/NxUeG1HhEY0DyQ80fovrGIEN2HI5cy8daq4zh2OUt0FKJa1zjIHVMfiUbTYA/RUciKsYgQ3YOsghLMWn8Ky/ZeBM/CkD2Ty4CR7eri1R4RcHLk5b5UfSwiRNX0y8EkzPgjDtdy+Yq4ROVCvDSY+Xgs2vDqGqomFhGiKkrLKcLrvxzFxrhU0VGILJJMBgxrE4LJPSPhzMmsVEUsIkRVsP54Mt749TjS8zgKQnQ3gR5OmPl4LNqHe4uOQlaARYToDrILS/DObyfw66HLoqMQWZ0nWwXhjd5RcFE7iI5CFoxFhOg2dp69hld/OoIrfHVcontWx02NGf1j0DnCV3QU
slAsIkS3KCzR44N1p7B4VyL43UFUMwY2D8TURxty7ghVwCJCdJMjlzLx0orDOJfGF6gjqmn1fJwxd0hzROhcREchC8IiQgRAb5Dw2aYzmLPlLEq5MAiR2Tg5KPB+v0YY0DxQdBSyECwiZPcy8oox/sdD2H7mmugoRHZjYPNAvN+vEdQOXATN3rGIkF07fjkLY5YcwOXMAtFRiOxOpM4Fc4c04yv62jkWEbJbKw8k4c1fj6Go1CA6CpHd0qqU+ODxGDwcW0d0FBKERYTsTonegHd/P4Hvd18UHYWIbhjeNgRv9YmGo1IuOgrVMhYRsiup2YV4fulBHLiQIToKEd0iNtANcwY3Q5CnRnQUqkUsImQ39iWm44WlB5GWUyQ6ChHdhqezI74e3gLNQzxER6FawiJCdmHRzvOY/kccSvT8cieydCqlHJ8MaoLeMf6io1AtYBEhm6Y3SHjz12P4cd8l0VGIqBpkMuC/PSMxplM90VHIzFhEyGYVlugx7oeD2BiXKjoKEd2joW2C8e6jjaCQy0RHITNhESGblJlfjGcW7+ekVCIb8GCkL74Y3BQaR75OjS1iESGbcyWzACMW7MWZ1FzRUYiohjQKcMWCES3h66oWHYVqGIsI2ZQzV3MwfMFeJGcVio5CRDUswN0JC0a25Ivm2RgWEbIZ+xPT8czi/cgqKBEdhYjMxEWtxPyhzdE+3Ft0FKohLCJkE/46eRUvLjuIwhIu105k6xwVcswb2gxdo/xER6EawCJCVu/HvRfx5qrj0Bv4pUxkLxwVcswZ0gzdo1lGrB2LCFm1L7cl4P/WnRIdg4gEcFDI8MXgZujRUCc6Ct0HvroQWa1vd5xnCSGyYyV6CeN+OIj1x5NFR6H7wCJCVmnJ7gt4f81J0TGISLCyMnIIfxxjGbFWLCJkdZbvu4i3fzsuOgYRWYhSg4Txyw5hzdEroqPQPWARIavy66EkvP7LMXBmExHdrNQgYcKPh7H6CMuItWERIaux5ugVTPrpKHhxDBFVRm+Q8NLyw/jt8GXRUagaWETIKmw4kYKJPx7mJbpEdEd6g4SXVxzhaRorwiJCFm/LqVS8+MMhlLKEEFEV6A0SXl5+BLsSrouOQlXAIkIWbfuZNDz3/QEU67liKhFVXbHegDFL9uN0So7oKHQXLCJksY5fzsKYJQdQVMoSQkTVl11YipEL9yKFL4Jp0VhEyCKlZBXimcX7kF+sFx2FiKxYclYhRi7ci5xCvhimpWIRIYuTX1yKpxftw9XsItFRiMgGnErJwZglB1DM0VWLxCJCFsVwY2Gik8nZoqMQkQ35J+E6Xl15BHx5NcvDIkIWZdraOGyMSxUdg4hs0G+Hr2Dm+tOiY9AtWETIYizZfQELdp4XHYOIbNj8bQn4blei6Bh0ExYRsgjb4tPw7uoTomMQkR2YuvoENp68KjoG3cAiQsLFX83BuKUHuWAZEdUKgwS8tOIwEq/liY5CYBEhwdJyivDUwn3IKSoVHYWI7EhOYSme+/4ACku4RIBoLCIkTKnegOe/P4DLmQWioxCRHTqVkoM3fj0mOobdYxEhYT788zT2X8gQHYOI7NgvBy9j6Z4LomPYNRYREmLL6VR89fc50TGIiPDu7ydxNClTdAy7xSJCtS4lqxCvrDgCritERJaguNSA578/iIy8YtFR7BKLCNUqvUHC+B8PIZ3f8ERkQS5nFmDC8sMw8Oq9WsciQrVq9sZ47D2fLjoGEVEFf8en4dNNZ0THsDssIlRrdp69hjlbzoqOQUR0W59vPoOtp/kyE7WJRYRqRVpOESb8eBgc9SQiS2aQgJdXHEFaDl/9u7awiJDZGQwSJi4/hGu5/MYmIsuXnleMN7m+SK1hESGzm7PlLHaevS46BhFRlf158ipWHbosOoZdYBEhszqWlIXZnPxFRFbondUnkJpdKDqGzWMRIbMp1Rvw2s9HoefEECKyQlkFJfjvLzxFY24sImQ2X/59DnHJ2aJjEBHds82nUrFi/yXRMWwaiwiZRUJaLq/HJyKb8P6ak0jO4otzmotMkrjQNtUsSZLwxJe7sC+RL2hXXaU515C5dREKzh2AVFoMpWcdePWaAJUuHABwYebDlT7OvfNTcGv9eKXvK067gKwdS1GUchb67FR4PDgKri37mmyTe2ILMrcthlRSCG3sQ/Do8vS/mbKu4uryKfAfMRtylaaGPlIi69KhvjeWPNNadAybpBQdgGzP97svsITcA31hLlK+fw3q4Fj4DpwKhbM7SjKSIVc5G7cJHLvE5DEF5/bj+rrPoIlof9v9SqVFULrroIloj4zN31Q8bn4W0td/Dq/eE6F01yF15btQBcdAU68lAOD6hrnw6DSSJYTs2vYz1/DDnosY3DpYdBSbwyJCNepKZgFmrj8tOoZVyt69EkpXb3j3mWi8T+nmZ7KNQuthcjv/7B6oQ2Lg4K677X5V/g2g8m8AAMjYtrjC+0szUyBTaeAc1REAoA6ORcm1i0C9lsg7uRUyhRKaiHb3+mER2Yzpa0+iQ31vBHmylNckzhGhGvXmr8eQW1QqOoZVKji7B466+khb9X+49PkQXFk4HjmH1992e31eBgoS9kEb+9B9HVfpGQCppAjFVxOgL8hBcXI8HH1CoS/IQeb2pfDs/tx97Z/IVuQV6/EGFzqrcRwRoRrz2+HL2HI6TXQMq1WSmYKSQ3/AtWU/+LV9AkXJ8cjY9BVkSgdoG3WtsH3u8U2QOzpB0+D+RisUai28+7yEa2v+B6m0GM6NHoRTWHNc+2M2XJo/jNKsq0j9+X3AUAq39oPhHPnAfR2PyJptP3MN644lo1eMv+goNoNFhGpEel4x3v39pOgY1k2SoNKFw6PTCACAo189lFy7iJxDf1ReRI5uhHN0Z8iUjvd9aE2DdiaFpvDiUZSkXYBn9+dw5avR8H7kVSicPZD83ctQBzWCwtn9vo9JZK2mrY1D5whfODkqREexCTw1QzVi+to4pOcVi45h1RRaDzh4m06Ec/AKgj674ihT4aXjKE1Pgrbx/Z2WqYxUWoL0P+fBs8dYlGYkQzLooQ6OgYNXIBw8A1CUzDlAZN8uZxbwlcRrEIsI3bcjlzLxy6Ek0TGsniogGiXpps9jSfplKF19K2ybe/QvOOrC4egbVuM5Mv/5Eeqw5mWXDEsGwKA3vk8ylAIGQ40fk8jafLX9HBKv5YmOYRNYROi+vbfmJLgazf1zbdkXRVdOI2vXCpRkXEHeya3IPbIe2mZ9TLYzFOUj//SO205SvbbmY2RsW2S8LelLUHz1HIqvngMMpdDnXkfx1XMoybhS4bHFaReQf+pvuD8wFACg9AwEZHLkHPkT+Qn7UHI9CY7+9WvugyayUsWlBrz7+wnRMWwC54jQfVl95AoOXOCaITVB5d8APo+9icxti5G5cxmUbn7weHAUtA27mGyXF/c3IAHO0Z0q3U9pdhog+/dvDH1uOpIXjTfezt77C7L3/gJVUCPoBn9gvF+SJKRv+AIeD46C3FENAJA7qODVeyLS/5oHSV8Cz+7PQeniXZMfNpHV2nI6Ddvi09CpgY/
oKFaNK6vSPSss0aPrx9twOZNLHxORfWrgp8W6CR2hkMtER7FaPDVD9+yb7edYQojIrsVfzcUPey6IjmHVWETonlzPLcL8bedExyAiEu6TjWeQVVAiOobVYhGhe/L55rNcQZWICGXrKH2xma82fq9YRKjaLl7Px1IORRIRGX236wJSswtFx7BKLCJUbR/+eRoles5xJiIqV1RqwLxtCaJjWCUWEaqWY0lZWHO04voTRET27oc9F5Gaw1GR6mIRoWqZvTGei5cREVWiqNSALzmJv9pYRKjKTqVkY/PpVNExiIgs1tI9F5CWUyQ6hlVhEaEqm7c1gaMhRER3UFhiwFd/c65IdbCIUJVcvJ6PNUeTRccgIrJ43+++iGu5HBWpKhYRqpIv/06A3sDhECKiuyko0ePrvzlXpKpYROiuUnMK8dOBpLtvSEREAIAluy/gOkdFqoRFhO7q2x3nUVxqEB2DiMhq5Bfr8fX286JjWAUWEbqjrIISLN19UXQMIiKrs2RXIjLyikXHsHgsInRHS3Yl8jVliIjuQV6xHsv3XxIdw+KxiNBtFZbosXBnougYRERW6/vdF2DgRP87YhGh2/px70Vc57AiEdE9S8oowBYuBHlHLCJ0W4t38RV2iYju13f8WXpHLCJUqV0J13H+Wp7oGEREVu/vM2m4cJ0/T2+HRYQq9eM+XilDRFQTJKlsrghVjkWEKsjML8a64ymiYxAR2YyfDiShsEQvOoZFYhGhCn45eJkLmBER1aDM/BKsPnJFdAyLxCJCFfC0DBFRzVvCSauVYhEhEwcuZCD+aq7oGERENufY5SwcupghOobFYREhEz/u5WgIEZG5cFSkIhYRMsopLMGao8miYxAR2aw/jicjjy+bYYJFhIx+O3wFBZzVTURkNoUlBmyMuyo6hkVhESEjTlIlIjK/349w5PlmLCIEADiVko3jl7NFxyAisnl/x6chu7BEdAyLwSJCAIB1x7iAGRFRbSjWG7CBi0YasYgQAGDDCX5TEBHVFl4Y8C8WEULitTycSskRHYOIyG7sPHsNGXnFomNYBBYRwnqOhhAR1apSg8TX9LqBRYSwnt8MRES1bs1RvvYMwCJi91KyCnEkKVN0DCIiu7P73HWk5hSKjiEci4id23AiBZIkOgURkf0xSLxiEWARsXs8LUNEJM7aY7x6hkXEjmXkFWNvYrroGEREduvQxQy7f+0ZFhE79tfJq9AbeF6GiEiUEr2E3eeui44hFIuIHeMiZkRE4m0/c010BKFYROxUid6AXXbewomILMGOsywiZIeOJmUiv1gvOgYRkd07m5qLlCz7vYyXRcRO7T7HSapERJZi+5k00RGEYRGxU/Y+OYqIyJLY8+kZFhE7VKo34MCFDNExiIjohp1nr0Gy09UlWUTs0JGkLM4PISKyINdyi3EyOVt0DCFYROwQT8sQEVmeHXZ6GS+LiB1iESEisjz2Ok+ERcTOlOoNOMj5IUREFmd/YoZdrnbNImJnjl7OQh7nhxARWZyCEj3OpuaKjlHrWETsDE/LEBFZrqNJmaIj1DoWETuz9zwXMiMislTHL2eJjlDrWETsjD1+kRMRWYtjdvgzmkXEjqTlFOFabrHoGEREdBsnk7PtbsIqi4gdOZVin4vlEBFZi8ISA86k5oiOUatYROzIqWT7+uImIrJGx5Ls6/QMi4gdieOICBGRxbO3eSIsInYkjiMiREQWj0WEbFKp3oAEO1woh4jI2sTZ2YRVFhE7kZCWh2K9QXQMIiK6i8ISA+Kv2s8INouIneAVM0RE1oNFhGwO54cQEVmPi9fzRUeoNSwidiIumSMiRETW4mI6iwjZGHsa5iMisnYsImRTSvQGXM0uFB2DiIiq6BKLCNmSlKxC2NGVYEREVi8luxDFpfZxpSOLiB1IzuJoCBGRNTFIQFKGfYyKsIjYgeSsAtERiIiomuxlngiLiB24kskRESIia2Mv80RYROwAR0SIiKwPR0TIZnBEhIjI+rCIkM3giAgRkfW5mG4fP7tZROwAr5ohIrI+9rL+E4uIjSss0SM9r1h0DCIiqqbsghLREWoFi4iN42gIEZF1KjVIyC0qFR3D7FhEbBznhxARWa8sOxgVYRGxcdkFtt+miYhsVVY+iwhZufxiFhEiImvFERGyennFetERiIjoHrGIkNXLt4OJTkREtsoerpxhEbFxHBEhIrJeHBEhq8cRESIi68UiQlaPIyJERNaLRYSsHq+aISKyXiwiZPXyijgiQkRkrfLs4PQ6i4iN44gIEZH10kuS6AhmxyJi4zhHhIjIeukNLCJk5Qo4IkJEZLUMHBEha1dqB22aiMhWcUSErJ5cJhMdgYiI7pHBIDqB+SlFByDzkrOHkA2r46bG813CRccgMhtfF5XoCGbHImLjOCJCtuzlhyIwoHmg6BhEdB94asbGyVhEyEZF6lzQv2mA6BhEdJ84ImLjFKyaZKMm94qEXC4rO4kul2PWvlko0dv+KpRkn1xVrnix6YuiY5gFi4iNU8rZRMj2tA3zQpcIX+B6AiBXIsVRhSUnl4iORWQ2OmedzRYR/paycSolP8VkW2Qy4PXekWU3tv8PcAvCqfRTYkMRmZlCphAdwWz4W8rGObKIkI3pE+OP2EB3IOkAcP0sIJeziJDNk8F25/vxt5SNUyltt0WT/XFQyPBqj4iyG3+9DegaAQBOp58WmIrI/OQy2/11bbsfGQEAVA78FJPtGNI6BCFezkD8BuDCDkAXAwAcESGbp5Db7h+V/C1l4zhHhGyFi0qJ8V3rl10ls3Fq2Z26GOQW5+Jy7mWh2YjMTaPUiI5gNvwtZeNcVLwwimzDmE5h8HR2BI78AKSeBGQKwLchTqWfggTbfz0Osm9aR63oCGbDImLjPJ1tf3lgsn1+rio880AYUFIAbJlRdqd3A8BBjdMZnB9Ctk/rwCJCVspL6yg6AtF9m9itAZwcFcDueUD2jdMwnB9CdsTZwVl0BLNhEbFx3iwiZOXCfbV4okUQkJ8O7Jj97ztuFBFeMUP2gCMiZLW8tDw1Q9bttR4RUMhlwN8fAUVZ/75DF4MSQwnOZp4VF46olnBEhKyWlzNHRMh6tQz1wEMNdUDGBWDf16bv1MXiXOY5lBj4+jJk+zhZlawWR0TImv23V1TZfzZPA/TF/77DpQ7g7MX5IWQ3eGqGrJabkwMc+RK8ZIV6NtSheYgHkHwEOPaT6Tv9YwFwoirZD56aIavmydMzZGWUchle61m+lPs7wK3rhJRPVOWlu2QnXBxdREcwGxYRO8BLeMnaDGoZhDAfLXB2E3BuS8UNeMUM2RlvJ2/REcyGRcQOcESErInGUYEJ3eoDkgRsfKfyjXQxuJx7GdnF2bUbjkgQnbNOdASzYRGxA96csEpW5NkOYfB1UQNHVwApxypuoHIFPOpyfgjZDUe5IzzVnqJjmA2LiB3QualFRyCqEm+tI8Z0DANKi8qulKmMXyNAJuNpGbIbvhpf0RHMikXEDtT1tt3Z1mRbxnetD2eVEtj7NZB1sfKNuLQ72RlbPi0DsIjYhXo+LCJk+ep6O2
Nwq2CgIBPY/tHtN+REVbIzfs5+oiOYFYuIHQjztt2FcMh2vNojAkqFHNjxCVCQcfsNdTHIKsrClbwrtReOSCA/DYsIWTkPZ0d4aBxExyC6rSZB7ugd4w9kXQb2zL/9hnIHwDeKoyFkV1hEyCaE+XBUhCzX670iy/6zZTpQWnj7DX0iAKWK80PIrnCOCNmEME5YJQvVNdIXrcO8gKsngCPL7rwxV1QlO8QiQjaBIyJkiRRyGSaXj4ZsnApIhjs/gFfMkB0KdQ0VHcGsWETsRBivnCEL9HizADTwcwHObwfO/Hn3B+hiUKwvxrmsc+YPR2QB/J39oXHQiI5hViwidoKX8JKlUTvI8XL3iLKl3P96u2oP0sXgbOZZlBpKzRuOyELUc68nOoLZsYjYiWBPZyjkMtExiIyebl+3bNXfE78CVw7e/QFuwYCTB6+YIbtSz41FhGyEo1KOIA8n0TGIAAAeGgc817keoC8BNr1XtQdxfgjZIY6IkE1p4OciOgIRAGDcg/XhqnYA9i8AMs5X7UEsImSHWETIpjQN9hAdgQhBnk4Y1iYEKMoBts2q+gN1MZAkCfEZ8eYLR2RhWETIpjQNdhcdgQiTHoqAo1IO7PwUyL9W9QfqYpCUk4TcklzzhSOyIP7O/nB2sP0LDVhE7EjjQHcoOWGVBGoU4IpHG9cBclKAXXOq/kC1O+ARglMZPC1D9iPMPUx0hFrBImJHnBwViNBxngiJ83qvKMhkMmDr/wEl+VV/IOeHkB0KdwsXHaFWsIjYmWacJ0KCdGzgg/bh3kBaPHBwSfUeXL60Oy/dJTvSyKeR6Ai1gkXEznCeCIkglwH/7XljKfdN7wKSvno7uFFE4tLjajgZkeVq4tNEdIRawSJiZzgiQiL0axKA6DquwMXdwKk11d+BLgYZhRlIzU+t+XBEFshX42vzL3ZXjkXEzoR6O8PT2VF0DLIjjko5Xn6oQdmNP6dUfwcKR8AnkvNDyK409mksOkKtYRGxQ02D3EVHIDsyom0IAj00QNzvQNLe6u/AJxJQOHB+CNkVFhGyaZwnQrXFVa3E2C7hgL4U2Pjuve1EFwsAvHSX7AqLCNk0zhOh2vJCl3C4axyBQ98B18/c207KL929ziJC9sFB7oBor2jRMWoNi4gdahrsUbayJZEZ1XFTY2S7UKA4D9j6wb3vSBeDIn0RErMTayoakUWL8oqCo8J+5vLxt5EdcnJUoHVdT9ExyMa9/FAE1A6KshVUc6/e415kgC4GZzLOQF/dS36JrJQ9nZYBWETsVpcIX9ERyIZF6lzQv2kAkHcN2PnZve/IIwRQu/KKGbIr9rJ+SDkWETv1YCSLCJnP5F6RkMtlZadkinPufUdc2p3sjEKmQGv/1qJj1CoWETsV6u2Mut62/6qOVPvahnmVjbhdTwAOLLq/nZVfMcMiQnaikXcjuKncRMeoVSwidqxzhI/oCGRjZDLg9d7lS7m/BxhK7m+HuhgYJAPOZNzjFTdEVqZ9QHvREWodi4gd4+kZqml9YvwRG+gOJB0ATq66/x3qYnEx+yLyS6vxSr1EVuyBOg+IjlDrWETsWOu6XtA4KkTHIBvhqJDjtR43RkP+evv+d6jxAtwCuJAZ2Q0PlQcaejcUHaPWsYjYMUelHO3qeYuOQTZicOtgBHtpgPgNwIUd979Dv7KXQOfS7mQv2tRpA7nM/n4t299HTCZ4eoZqgotKifFd6wMGA7Bxas3s9MYVM3HpcTWzPyIL90CA/Z2WAVhE7F6XSE5Ypfs3plNY2as6H14KpJ6smZ3euGKGIyJkD2SQoV2ddqJjCMEiYuf83ZwQqXMRHYOsmJ+rCs88EAaUFABbZtTcjv1jca3gGq4VXKu5fRJZqEjPSHg72eepchYRwiON64iOQFZsYrcGcHJUALvnATlXamanSjXg3YCjIWQ3OgZ2FB1BGBYRQr+mAZDJRKcgaxTuq8UTLYKA/HRgx+ya27FvFCBXcCEzshs9QnuIjiAMiwghwN2JL4JH9+S1HhFQyGXA3x8BRVk1t2Mu7U52JNw9HPU96ouOIQyLCAEA+jcNFB2BrEzLUA881FAHZFwA9n1dszvn0u5kR+x5NARgEaEbesXooHbglwNV3X97RZX9Z/M0QF9cszvXxaKgtAAXcy7W7H6JLFCvur1ERxCKv3kIAOCidkC3KD/RMchK9GyoQ/MQDyD5CHDsp5rduUwO+DVEfEY8DJKhZvdNZGGiPKMQ4hoiOoZQLCJk1L9ZgOgIZAWUchle6xlRduOvtwFINXsAj7qASotT13lahmxfz7o9RUcQjkWEjDrW94G31lF0DLJwg1oGIcxHC5zdBJzbWvMHKJ+oyteYITvQM5RFhEWEjJQKOR6O5ZoidHsaRwUmdKsPSBKw8R3zHORGEeEaImTrYn1iUUfLn7ksImSCp2foTp7tEAZfFzVwdAWQcsw8B/FvDL1BjzMZZ8yzfyILwdGQMiwiZCI20B31fJxFxyAL5K11xJiOYUBpUdmVMuaii8GF7Aso1Bea7xhEgjnIHdAnrI/oGBaBRYQq+E/LYNERyAJN6FofziolsPdrIMtMl9U6+wIuOr7iLtm8rsFd4anmQpIAiwhV4omWQXB2VIiOQRakrrcznmwVDBRkAts/Mt+BdI0AcH4I2b6BDQaKjmAxWESoAjcnBwxozpVW6V+v9oiAUiEHdvwPKMgw34G4tDvZgVDXULTybyU6hsVgEaFKPdW+Ll8IjwAATYLc0TvGH8hKAvZ8ad6D3Vja/XQGR0TIdg1oMEB0BIvCIkKVCvV2RtdIX9ExyAK83iuy7D9bZgClZp5AqotFan4q0gvTzXscIkFUChX6hfcTHcOisIjQbT3dvq7oCCRY10hftA7zAq6eAI4sM+/BHDSAVzhPy5BN6x7SHW4qN9ExLAqLCN1Wu3BvRPm7io5BgijkMkwuHw3ZOBUw9+u++EYDcjmLCNk0TlKtiEWE7uip9qGiI5AgjzcLQAM/F+D8duDMn+Y/ICeqko0Ldw9HM79momNYHBYRuqO+TerAW6sSHYNqmdpBjpe7R5Qt5f7X27VzUC7tTjZuUMQg0REsEosI3ZFKqcCQ1lzgzN483b4udG5q4MQvwJWDtXNQ/8bIK8nDpZxLtXM8olrkofJA3/C+omNYJBYRuquhbULgqOSXir3w0Djguc71AH0JsOn92jmoTAH4NcTp9NOQINXOMYlq0X8i/wMnpZPoGBaJv13ornxcVHicL4ZnN8Y9WB+uagdg/wIg43ztHNSrHuDgxPkhZJPUCjWejHxSdAyLxSJCVfLig/U5KmIHgjydMKxNCFCUA2ybVXsHLp8fwoXMyAb1De8LD7WH6BgWi79ZqErquDtxrogdmPRQRFnh3PkpkH+t9g7MK2bIRillSoxsOFJ0DIumFB2ArMfYLuFYvu8S8ov1oqOQGTQKcMWjjesAOSnArjm1e3BdLEoNpTibcbZ2j2uB0takIftANoqSiyBzkEETroHuCR1U/v9evaYv1OPqT1eRfTAb+lw9HL0d4dndE14Pet1x31n7spD6ayqKU4vh6
OsIv8f94Nr837WCMv/JRMrKFEhFEjw6eED3H53xfcVpxUj8KBH1ptaDwokvillVvcN6I9CFr911JxwRoSrz1qowsl2o6BhkJq/3ioJMJgO2/h9Qkl+7B9fF4lzWORQbimv3uBYo71QePB/0RNiUMIS+GgoYgMSPEmEo+ndBuZQfUpB7LBeBowNRf0Z9ePXwQvL3ycg+mH3b/eafzceleZfg3s4d4e+Fw72dOy7OvYj8hLLPdWlOKS4vvAz/Qf4IeSUEGTszkHM4x/j4K99dgd9AP5aQapDL5Hgm5hnRMSweiwhVy5hO9eCq5kCarenYwAftw72BtHjg4JLaPbhWB2h9uH7IDaGTQuHRwQPqADWcgp0Q8EwASq6XoCCxwLhNfkI+3Nu7QxulhaOPIzw7e0IdpEbB+YLb7vfan9egbaiFz8M+UNVRwedhH2ijtLj+53UAZSMeCicF3Fq7QROmgXOUMwqvlL22UOauTMiUMri14NLk1dE1uCvC3MJEx7B4LCJULW5ODhjdkd9YtkQuA/7b8+al3Gv51Bvnh9yRvqDs86Fw/nckQlNfg5zDOSjJKIEkSciNy0Xx1WJoY7S33U/B2QJoG5m+XxujRf7ZshERlZ8KhmIDCi4UoDS3FAXnC6AOUqM0txSpv6bCf6i/GT462yWDDKNjR4uOYRX4py1V21Pt62LRP4m4lsthdFvQr0kAouu4Ahd3A6fX1n4Arqh6W5IkIWVZCjQNNFAHqo33+w/1x5WFV3D6pdOAApDJZKjzVB04N3C+7b5Ks0qhdDX9ka90VaI0qxRAWdEJHBWIpK+TIBVLcG/nDpcYFyR9mwTPbp4ouVaCi59ehKSX4NvPF24tOTpyJz1DeyLSM1J0DKvAIkLV5qxS4vnO4Xh/zUnRUeg+OSrlePmhBmU3/pwiJoR/LADgVAZHRG6VvCQZhZcKEfam6Shk+l/pyE/IR/CEYDh6OyLvdB6SlyTDwd0B2oa3HxWB7Jbbt6wd59rc1WTyam5cLoqSilBnaB3ET45H0HNBULopkfBeApwjnCsUGyqjlCvxYtMXRcewGjw1Q/dkaJtg+Lup774hWbQRbUMQ6KEB4n4HkvaKCaGLRXJuMrKKssQc30JdWXIF2YezUfe/deHg6WC831BswNWVV+H/H3+4NnWFOkgNr25ecGvlhmvrbn/JtdLt39GPcqU5pVC6VV4mDCUGJC9JRp0RdVCcWgxJL8E50hkqfxVUOpVxkitVNKD+AAS5BomOYTVYROieqJQKvPhgfdEx6D64qpUY2yUc0JcCG98VE8JRC3iGcX7ITSRJKishB7JR97W6cPRxNH2/XoKklyr+9JaXPfZ2nMKdkHsi1+S+3OO50IRrKt0+bXUatDFaOIU6QTJIwL8X7UAqNb1N/9IoNXiu8XOiY1gVFhG6Z0+0CEQDvzsMA5NFe6FLONw1jsCh74DrZ8SE8GsIyGQ8LXOT5CXJyPwnE0HPBUGulqMkswQlmSUwFJf95lc4KaCJ0CBleUrZJNW0YmRsz0DmzkyT0ypJXyUh5acU423v7t7IPZ6LtLVpKLpShLS1acg9mQuvhyquPVJ4uRBZe7Pg198PAMrWMJEB6dvSkXM4B0XJRXAK4+umVGZEwxHwcrrzei5kiif46J4pFXJMfaQhBn+zR3QUqqY6buqyNWGK84CtH4gLwomqFaRvTgcAnP/A9HV+Ap4JgEeHsmXCg54PwtWVV5H0ZRL0eXo4eDnA73E/eHbxNG5ffL3YZE6Ipr6m7HE/X0XqL6lw9HVE0PNB0NQzHRGRJAlXFl6B7kkd5Kqyv1XljnIEPBuA5CXJkEok+A/zh4OHA8iUp9qTq6jeA5l0p7E8oioYu/Qg1h5LFh2DquGjgY0xoHkgsHUmsHWGuCCPfAY0H4GeP/fE5dzL4nIQ1YDXW72OwVGDRcewOjw1Q/ftzT5RcHLgaovWIlLngv5NA4DcNOCfz8SG0cUgpziHJYSsXpBLEAZGDBQdwyqxiNB9q+PuhLFd6omOQVU0uVck5HIZsG0mUJx79weYi1wJ+EZzoirZhPHNxsNBztNV94JFhGrEqI5hqOt9+8WUyDK0DfNClwhf4HoCcGCR2DBe9QEHNeeHkNVr498GPUN7io5htVhEqEaolApM69dIdAy6A5kMeL33jZUeN70HGErEBuLS7mQDHOWOeLP1m6JjWDUWEaox7cO98VjTANEx6Db6xPgjNtAdSDoAnFwlOo5xRdXTGRwRIev1VKOnEOoWKjqGVWMRoRr1Vp8ouGt4ntTSOCrkeK3HjdGQv94WG6acLgYl+hKczTwrOgnRPQnUBmJU7CjRMaweiwjVKC+t6t9XciWLMbh1MIK9NED8BuDCDtFxyuhikZCVgFJD6d23JbJAb7R+AyqFSnQMq8ciQjVuUMsgtKrrefcNqVa4qJQY37U+YNADf70jOk4Z1wBA48n5IWS1uod0R4fADqJj2AQWEapxMpkMHw9sDBcVF+61BGM6hcHT2RE4/AOQFic6ThmuqEpWTKPU4LWWr4mOYTNYRMgsgjw1eOfRhqJj2D0/VxWeeSAMKCkAtszA1K2FkL2bbfKm+yjnjvvYlliK5l/lQj0tG2Gf5mD+/mKT9/+VUIoGn+fC7YNsjFhVgGL9v4s1ZxVKaPB5Li5m3fIKabqyiaocESFr9EKTF6Bz1omOYTP4JyuZzYDmgdh86ir+OJZy943JLCZ2awAnRwWwfR6QcwUA0NBHjo3D/319EYXsdo8GzmcY0PuHfIxq5ojvH3PAzkt6vLC2ED4aGR6PdoBBkjDklwL89wFH9KinxICfCvD1gRKMbVX2irGTNxbiuRYOCHa75W8ejoiQlWro1RBDooaIjmFTWETIrGY8FoMDFzJwNbtIdBS7E+6rxRMtgoD8dGDHbOP9Sjmg01ZtMHT+/mIEu8kxu6caABDlo8D+K3p8tKsYj0c74Fq+hLR8CS+0dIRaKcOjDZQ4maYHAOy8WIr9V/SY01tdcce6GCTlJCGn5M6jMUSWRK1QY0aHGVDK+auzJvHUDJmVu8YRHw1sDNkd/uom83itRwQUchnw90dAUZbx/jPpBtT5OAd1P83Bf1bm41yG4bb72JWkx0Nhpj90e9RTYv8VPUr0Enw0MvhrZfgzoRQFJRK2X9Qj1k+BYr2E59cWYv7DTmUZbqZyAzxCORpCVmdCswkIcwsTHcPmsIiQ2XWo71P2kvNUa1qGeuChhjog4wKw72vj/a0DFPiunxM2DNXg60eckJIrod23ebieX3kZScmV4Kc1LRJ+WhlKDcC1fAkymQwrBjrh/b+LED03F011cjzd1AEf7ChG17pKOCmB9gvyEPFFLr7Ye2NuiV9DQCbDqQzODyHr0dq/NU/JmAnHl6hWTO4ZiX/OXsfpqxyKrw3/7RVV9p/N7wP6fyeX9qr/72JzMQDaBipQ77NcLD5SgpfbVr4ewq2DWdKNuajlo1wPBCuxb5TW+P7463osOVqCQ2Oc0XFhHia2cUTPcCUazc1DxxAFYltzoipZFxcHF0xrPw0yDu2aBUdEqFaoHRSY/Z8m
cFTyS87cejbUoXmIB3DlMHBs5R23dXaUIcZPjjPXKx8R0WllSMmVTO5LzZOglANeThV/KEuShNG/F+Ljh1QwSMChFAMGRDvA11mOTqEKbEvU8zVmyOq83vp1XiVjRvytQLUmyt8Vkx5qIDqGTVPKZXitZ0TZjY3vAJDuuH1RqYS4NAP8XSr/UdA2UIG/zpmufPpnQila1FHAoZLLbb49VAIvjQyPRjhAf6PblOj//VcvSYAuBllFWUjJ49VUZPm6h3THI/UeER3DprGIUK169oEwPBDuLTqGzRrUMghhPlrg7Cbg3NYK75/0ZyG2JZbifIYBe5JKMeCnAmQXSRjRuOyUzesbCzH81wLj9s+1cMSFLANe3lCIuDQ9FhwqxreHSjCprWOFfafmGTDt7yJ8duMKGw8nGaK85Zi9uxi7LpVi0/lStAtWAT5RHA0hq+Dt5I2321jIazPZMBYRqlVyuQyfPdkUgR5OoqPYHI2jAhO61S+bxLGx8qXck7INePLnAkR8kYv+KwrgqAB2P+uMEPeyHwXJuZLJ4mN1PeT4Y7AGWxNL0eTLPLz/dxE+66XG49EVX9hwwvpCTGqnQoDrvz9WFvVzwo8nSvDwsgK82k6FVk0bAUpHFhGyeDLI8F679+CudhcdxebJJEm689gtkRmcuJKFAfN2oaB83J7u2/iu9fFy9wbAkeXAr6NFx6lc48HAY/PwxvY38Pu530WnIbqtUTGjML7ZeNEx7AJHREiIhnXcMHNArOgYNsNb64gxHcOA0iJg8zTRcW7vxkTVuHQLec0bokq08W+DcU3HiY5hN1hESJhHG9cp++VJ921C1/pwVimBvV8BWRdFx7k9XQyK9cVIzEoUnYSoUn4aP8zsOBNyGX891hY+0yTU5J6R6NjAR3QMq1bX2xlPtgoGCjKB7R+LjnNnuhicyTyDUqn07tsS1TIHuQM+7vwxPNWeoqPYFRYREkoul+Hz/zRFqJfm7htTpV7tEQGlQg7s+B9QkCE6zu25BQNO7lzanSzWpBaT0NinsegYdodFhIRz0zjgq+Et4OyoEB3F6jQJckfvGH8gKwnY86XoOHfmzxVVyXL1rtsbg6MGi45hl1hEyCI08HPBx0804YvjVdPrvSLL/rNlBlBaKDbM3XBFVbJQ4e7heKdt5Ze8k/mxiJDF6NlIh3FdwkXHsBpdI33ROswLuHoCOLJMdJy708VAkiTEZ8SLTkJk5OLggv91/h80Djw9LAqLCFmUl7s3QL8mdUTHsHgKuQyTy0dDNk4FpMpfK8ai6GJwKecS8kryRCchAgAo5Up80uUT1HWrKzqKXWMRIYsik8nw4cDG6BzBK2nuZECzQDTwcwHObwfO/Ck6zt2p3QH3YJ6WIYsyte1UtPZvLTqG3WMRIYvjoJBj3pDmZa8gSxWoHeR4qXuDsqXc/7KS18Hg/BCyMGNix6BveF/RMQgsImShnBwVWDCiJSL8XERHsThPt68LnZsaOPELcOWg6DhVo+MVM2Q5Hg57mCunWhAWEbJYbhoHfPdMKwR58gXyynloHPBc53qAvgTY9L7oOFV3Y0SEa4iQaC38WuC9du+JjkE3YREhi+bnqsaSp1vDW6sSHcUijHuwPlzVDsD+BUDGedFxqk4Xg/TCdKQWpIpOQnasrltdzO4yGw6Kiq8eTeKwiJDFC/V2xuKnW8JFrRQdRaggTycMaxMCFOUA22aJjlN1ChXgE8HTMiSUp9oTc7vOhZvKTXQUugWLCFmFhnXc8M3wFlAp7fdLdtJDEXBUyoGdnwL510THqTrfSEDhwNMyJIzWQYu5Xeci0CVQdBSqhP3+VCer0zrMC18MbgaF3P6WX20U4IpHG9cBclKAXXNEx6meG/ND4tLjBAche6RRajCv2zw09G4oOgrdBosIWZXu0X744smmcFTY15fu672iIJPJypZyL8kXHad6blwxwxERqm1qhRpfdP0CTXybiI5Cd2BfP83JJvSK8ceXw5vbzWmajg180D7cG0g7DRz6XnSc6tPFoLC0EBeyL4hOQnbEUe6IT7t8ipa6lqKj0F3Yx09ysjldInyx6KlWNv+KvXIZ8N+e5Uu5vwtIerGBqk0G+DXCmYwz0FtddrJWSrkSH3f+GO0C2omOQlXAIkJWq209Lyx5tjVcbfhqmn5NAhBdxxW4uBs4vVZ0nOrzCAXUrjiVwStmqHYoZArM7DATnYM6i45CVcQiQlatWbAHlo1uAy9nR9FRapyjUo6XH2pQduPPKWLD3Kvypd2vs4iQ+cllckx7YBoeCn1IdBSqBhYRsnoN67hh+Zi20LmqRUepUSPahiDQQwPE/Q4k7RUd596UL+3OEREyM4VMgXfbvYuHwx4WHYWqiUWEbEK4rxY/PdfWZpaDd3NywLgu9QF9adncEGuli4FBMuBMxhnRSciGOcgd8GGnD9EvvJ/oKHQPWETIZgR5arBiTFuE+TiLjnLfXuhcD24aB+DQd8B1K/4lrovBhewLKCgtEJ2EbJRGqcGcrnPQPaS76Ch0j1hEyKb4uzlhxZi2aBLkLjrKPavjpsaIdqFAcR6w9QPRce6dxgtwC+D6IWQ2bio3fPPQN2hbp63oKHQfWETI5nhrVfhxdJuylUit0MsPRUDtoAD++QLIvSo6zr3jiqpkRr4aXyzuuRgxPjGio9B9YhEhm6R2UOCzJ5vile4NILOiFeEjdS7o3zQAyE0D/vlMdJz7c6OIcESEalqwSzC+6/Ud6rnXEx2FagCLCNm0F7vWx9zBzeDkYB0Ln03uFQm5XAZsmwkU54qOc3/Kr5jhq+5SDYr0jMTiXosRoA0QHYVqCIsI2bxeMf746TnLv7y3bZgXukT4AtcTgAOLRMe5f7oYXCu4huuF10UnIRvRStcKC3osgLeTt+goVINYRMguNApww+px7dE40E10lErJZMDrvW8s5b7pPcBQIjbQ/VKqAe8GHA2hGvN4/ccxv/t8uDi6iI5CNYxFhOyGr6say8e0xcOx/qKjVNAnxh+xge5A0gHg5CrRce6fbzQgV7CI0H2Ty+SY1GISprabCge5g+g4ZAYsImRX1A4KfDG4GSZ2q28xk1gdFXK81uPGaMhfb4sNU1PKl3ZnEaH7oFFqMLvzbIxoOEJ0FDIjFhGySxO7NcBXw1rAQyP+L6zBrYMR7KUBTq8HLuwQHadm8IoZuk+B2kB83/t7dAnuIjoKmRmLCNmt7tF+WDehI1rX9RSWwUWlxPiu9QGDHtg4VViOGqeLRX5JPi7mXBSdhKxQa//W+PHhH1Hfo77oKFQLWETIrunc1Fg2qg1e7t4ACnntn6sZ0ykMns6OwOEfgDQbWfhLJgf8GiI+Ix4GySA6DVmZIVFDML/bfLipLHNiOdU8FhGye3K5DOO71sfy0W0Q4F57L5rn56rCMw+EASUFwJYZtXZcs/MMA1Razg+hatE6aPFhpw/x31b/hVKuFB2HahGLCNENLUI98cf4DujZUFcrx5vYrQGcHBXA7nlAzpVaOWat4ERVqqaGXg2x4pEV6BnaU3QUEoBFhOgmbhoHzB/WHNMfawS1g/m
+PcJ9tXiiRRCQnw7smG224wjBiapUDcOih2FJryUIcgkSHYUE4fgXUSWGtA5BixBPvLjsIOKv1vxS66/1iCibk/L3h0BRVo3vXyhdLPQGPc5mnhWdhCyYu8od09pPQ6egTqKjkGAcESG6jQidC1aPewCjOtSt0YmsLUM98FBDHZCRCOz7psb2azF0MTifdR6F+kLRSchCNfNthp8e+cnuS8jIkSPRr18/0TGEYxEhugO1gwJv9onGb2Pbo1GAa43s87+9osr+s3kaoC+ukX1aDGdfwEWHUxmcH0IVyWVyjIoZhQU9FkDnbL65WCNHjoRMJqvwdvZs7YzSJSYmVnr8m9+mTp2KTz/9FIsWLaqVTJaMp2aIqqBRgBt+G/sAFu48j//9FY/8Yv097adnQx2ah3gAVw4Dx1bWbEhLwPkhdBuhrqF4v/37aOLbpFaO17NnTyxcuNDkPh8fnwrbFRcXw9HRsUaPHRQUhOTkZOPtjz76COvXr8fGjRuN92m1Wmi12ho9rrXiiAhRFSnkMjzbIQx/vtQRnSMq/kC7G6Vchtd6RpTd2PgOAKlmA1oCXjFDt1DIFHiq0VNY+ejKWishAKBSqaDT6UzeFAoFOnfujHHjxuHll1+Gt7c3unfvDgA4efIkevfuDa1WCz8/PwwbNgzXrl0z7k+SJMyaNQthYWFwcnJC48aNsXJl5X9MKBQKk+NqtVoolcoK9916aqZz58548cUXMXHiRHh4eMDPzw9fffUV8vLy8NRTT8HFxQX16tXDunXrTI53t+yWjkWEqJoCPTRY9FQrfPZkU3hrVVV+3KCWQQjz0QJnNwHntpovoEgcEaGbhLuH4/ve3+Pl5i9Dpaj694q5LV68GEqlEjt37sSXX36J5ORkdOrUCU2aNMH+/fuxfv16XL16FU888YTxMW+99RYWLlyIefPm4cSJE3jppZcwdOhQbNu2rcazeXt7Y+/evXjxxRfx/PPPY+DAgWjXrh0OHjyIHj16YNiwYcjPzweAKmW3dDJJkmzwzzKi2pGVX4IZf8RhxYFLuNN3ksZRga2vdoavVgV82QFIOVZ7IWvT2H1I0bii+8ruopOQQEqZEk/HPI3nYp+Dg6L2X89p5MiR+P7776FWq4339erVCz/99BM6d+6MrKwsHDp0yPi+t99+G3v27MGGDRuM9yUlJSEoKAinT59GQEAAvL29sXnzZrRt29a4zbPPPov8/Hz88MMPd8wzdepUrFq1CocPH66QMzMzE6tWrQJQNiKi1+uxfft2AIBer4ebmxv69++P7777DgCQkpICf39/7Nq1C23atLlr9gYNGlTvyROAc0SI7oObxgEzB8TisWYBePPXY0hIy6t0u2c7hMHXRQ0cWW67JcTBGfAKx+nL20UnIYEiPSPxXrv3EOUVJTRHly5dMG/ePONtZ2dn4/9btGhhsu2BAwewZcuWSudsJCQkICsrC4WFhcbTOOWKi4vRtGnTGs0dGxtr/L9CoYCXlxdiYmKM9/n5+QEAUlNTq5SdRYTITrQJ88KGiR2xdM9FfLrpDNLz/r0axlurwpiOYUBpUdmVMrbKLxqQyzk/xE45KZ3wbMyzeKrRU3CQi39Va2dnZ4SHh9/2fTczGAx45JFHMHPmzArb+vv74/jx4wCAtWvXIiAgwOT9KlXNnnJycDB97mQymcl9MpnMmLkq2a0BiwhRDVEq5BjRLhSPNQvAnM1nsfCfRBSXGjChazicVUrgn3lAlg2/Gm35/JAMzg+xNz1De+KVFq+Y9ZJcc2rWrBl+/vlnhIaGQqms+GsxOjoaKpUKFy9eRKdOlrX2yd2yWwNOViWqYa5qB7zeOwqbXu6Ep9qH4slWwUBBJrD9Y9HRzItXzNidSM9ILOyxEB92+tBqSwgAjB07Funp6XjyySexd+9enDt3Dn/++Seefvpp6PV6uLi4YNKkSXjppZewePFiJCQk4NChQ5gzZw4WL15s0dmtgXXWJyIrEOSpwTuPNCy7seN/QEGG2EDmpotFbnEuknKSRCchM/NQeWBc03EY0GAA5DLr/3u2Tp062LlzJyZPnowePXqgqKgIISEh6NmzJ+Tyso/v/fffh6+vL/7v//4P586dg7u7O5o1a4Y33njD4rNbOl41Q2RuWUnA582BUhte8lymAN64jAPpJzFy/UjRachMlDIlnoh4Ai80eQFuKjfRcchGcESEyNy2zLDtEgIAXuGAgxNPy9iw9nXaY1KLSQj3qHwCKNG9YhEhMrfwbsDFXUD6OdFJzIcLmdmsVrpWGNd0HJr61uxlqkTlWESIzK1RfyDqUeDQEmDbLCDniuhENY8TVW1OM99mGNtkLFr5txIdhWwciwhRbVAogRZPAY2fBPZ+Bez4BChIF52q5uhiUGIoQUJmgugkdJ9ivGMwrsk4tAtoJzoK2QlOViUSoTAb2D2vrJTkW8+LU93Wqwk4XXQdA34fIDoJ3aMozyiMbTIWnYIsa50Msn0cESESQe0KdJ4MtJ8AHP0R2DUXuGal8ytc/AFnb5xO+Ud0EroHLfxaYHj0cHQO6mxctZOoNrGIEInkoAaajwSajQDObgR2fWF9r8zL+SFWRylXokdoDwyPHo5or2jRccjOsYgQWQKZDKjfvewt5Tiwaw5wfCWgL777Y0XjFTNWw9XRFQMbDMSTkU/Cz9lPdBwiAJwjQmS5clLK5pDsX2DZq7IOXAQ0fAztl7VHdnG26DRUiRDXEAyJGoK+9fpC46ARHYfIBIsIkaUrzgeO/FA2jyTdAq9KefEgrqic0OPnHqKT0E2UMiU6BHZA//r90TGwo00sxU62iadmiCydowZo+SzQ4hkgfkPZeiRn/rSM0zaOLoBnGE5d2iI6Cd0Q7h6OfuH90CesD7ydvEXHIborFhEiayGTARE9y97y04FjK4Ejy4ArB8Vl8msIyGScHyKYi6MLeoX2wmP1H0Mj70ai4xBVC4sIkTXSeAKtR5e9pZ4qKyRHV9T+qq28YkYYuUyOVrpWeCz8MXQN6QqVQiU6EtE9YREhsna+kUD3d4FuU8te0+b4z8DJ34C8NPMfm0WkVinlSrTWtUbXkK7oEtSFp17IJnCyKpEtMuiBxO1lpSTud/NddTN6K7K966H9svbm2T/BSemEBwIeQNfgrugY2BEuji6iIxHVKBYRIlunLwEu7AQStgAJm4GUYwBq4NtergTeuIJ9147i6Q1P3//+yMhN5YZOgZ3QNbgr2tVpB7VSLToSkdnw1AyRrVM4AGGdy966vwvkXStbvfXcFiBhK5CddG/79W4AKFU8LVMDlHIlYr1j0aZOG7T1b4sY7xgo5ArRsYhqBYsIkb1x9gZiBpS9AUBa/I1SsgVI3AEU51RtP5wfcs8UMgUiPCPQ0q8lWvm3Qgu/FlxojOwWiwiRvfNpUPbWekzZaZyk/WWncM5tAa4cAgyllT+ORaTKtA5aRHlFoZFXI7TQtUAz32bQOmpFxyKyCJwjQkS3V1JYNqfkyiEg+XDZv2mnAUkPDF+NkpB2aPVDK5TerqzYIWcHZ0R5RiHaKxoNvRoi2isaIa4hfG
VbottgESGi6inOLysnukaQHDRIyklCfEY84jPjcSbjDOIz4pGUkwS9pBed1KyUciUCtYEIcQ1BqGsoorzKykeoayhLB1E1sIgQUY0rNZQiOS8ZSTlJSMpNwuWcy0jKTTLezirKEh2xSpQyJQJcAhDsEowQ1xAEuwYjxKXsX39nf04oJaoBLCJEVOtyi3ORlJuE1PxUZBZlIqMwA1lFWcgouvFvYQYyizKNbzVx6kcGGVQKFVRKFZyUTvBUe8JL7VX2r5MXvNRe8HIyve2h9uCLxRGZGYsIEVm8In0RSvQlKDWUosRw+38lSGVl48abWqmGo8IRakXZv0RkeVj1a0hoaChmz54tOoZFqo3nZuvWrZDJZMjMzDTrce5m6tSpaNKkidAMtkilUEHrqIW72h0+Gh/4a/0R7BqMMPcwRHhGoKF3QzTxbYKmvk0R7RWNeu71EOgSCG8nb7g6urKEEFmwahWRzp07Y+LEiRXuX7VqFSdnVUF2djbefPNNREZGQq1WQ6fToVu3bvjll19gywNT+/btw+jRo2tsf5V9HbZr1w7Jyclwc3OrsePcSiaT3fFt5MiRmDRpEjZt2mS2DEREtobriNykuLgYjo7m+cspMzMTDzzwALKysjBt2jS0bNkSSqUS27Ztw2uvvYYHH3wQ7u7uZjm2aD4+PmY/hqOjI3Q6nVmPkZycbPz/8uXL8fbbb+P06dPG+5ycnKDVaqHVcn0IIqKqMsupmfLh6SVLliA0NBRubm74z3/+g5ycf1dsNBgMmDlzJsLDw6FSqRAcHIzp06cb33/s2DE8+OCDcHJygpeXF0aPHo3c3FwAwIYNG6BWqysMw48fPx6dOnUy3v7nn3/QsWNHODk5ISgoCOPHj0deXp7x/aGhoZg2bRpGjhwJNzc3jBo1qkqPS01NxSOPPAInJyfUrVsXS5cuvetz8sYbbyAxMRF79uzBiBEjEB0djQYNGmDUqFE4fPiw8ZdXRkYGhg8fDg8PD2g0GvTq1Qtnzpwx7mfRokVwd3fHmjVrEBERAY1GgwEDBiAvLw+LFy9GaGgoPDw88OKLL0Kv//fyyfKPdfjw4dBqtQgJCcFvv/2GtLQ09O3bF1qtFjExMdi/f3+Fz+PNZs+ejdDQUOPtkSNHol+/fvjoo4/g7+8PLy8vjB07FiUlJSbHvvnUTGZmJkaPHg0/Pz+o1Wo0atQIa9asAQBcv34dTz75JAIDA6HRaBATE4Nly5aZHG/btm349NNPjSMRiYmJlZ6a+fnnn9GwYUOoVCqEhobi448/NvlYQkNDMWPGDDz99NNwcXFBcHAwvvrqq9t+DnU6nfHNzc0NMpmswn23Pmflz8+MGTPg5+cHd3d3vPvuuygtLcWrr74KT09PBAYGYsGCBSbHunz5MgYNGgQPDw94eXmhb9++SExMvG02IiJrZbY5IgkJCVi1ahXWrFmDNWvWYNu2bfjggw+M73/99dcxc+ZMTJkyBSdPnsQPP/wAPz8/AEB+fj569uwJDw8P7Nu3Dz/99BM2btyIcePGAQC6desGd3d3/Pzzz8b96fV6rFixAkOGDAFQVmR69OiB/v374+jRo1i+fDl27Nhh3Ee5Dz/8EI0aNcKBAwcwZcqUKj1u5MiRSExMxObNm7Fy5UrMnTsXqampt30uDAYDfvzxRwwZMgR16tSp8H6tVgulUmnc9/79+7F69Wrs2rULkiShd+/eJr/Y8/Pz8dlnn+HHH3/E+vXrsXXrVvTv3x9//PEH/vjjDyxZsgRfffUVVq5caXKcTz75BO3bt8ehQ4fQp08fDBs2DMOHD8fQoUNx8OBBhIeHY/jw4dU+TbRlyxYkJCRgy5YtWLx4MRYtWoRFixbd9rno1asX/vnnH3z//fc4efIkPvjgAygUZZdBFhYWonnz5lizZg2OHz+O0aNHY9iwYdizZw8A4NNPP0Xbtm0xatQoJCcnIzk5GUFBQRWOc+DAATzxxBP4z3/+g2PHjmHq1KmYMmVKhVwff/wxWrRogUOHDuGFF17A888/j1Onanal0M2bN+PKlSv4+++/8b///Q9Tp07Fww8/DA8PD+zZswfPPfccnnvuOVy6dAlA2ee3S5cu0Gq1+Pvvv7Fjxw5otVr07NkTxcXFNZqNiEg4qRo6deokTZgwocL9v/76q3Tzrt555x1Jo9FI2dnZxvteffVVqXXr1pIkSVJ2drakUqmkr7/+utLjfPXVV5KHh4eUm5trvG/t2rWSXC6XUlJSJEmSpPHjx0sPPvig8f0bNmyQHB0dpfT0dEmSJGnYsGHS6NGjTfa7fft2SS6XSwUFBZIkSVJISIjUr18/k23u9rjTp09LAKTdu3cb3x8XFycBkD755JNKP56rV69KAKT//e9/lb6/XHx8vARA2rlzp/G+a9euSU5OTtKKFSskSZKkhQsXSgCks2fPGrcZM2aMpNFopJycHON9PXr0kMaMGWO8HRISIg0dOtR4Ozk5WQIgTZkyxXjfrl27JABScnKyJElln8fGjRubZPzkk0+kkJAQ4+0RI0ZIISEhUmlpqfG+gQMHSoMGDTI5dvlzs2HDBkkul0unT5++43Nxs969e0uvvPKK8XZlX4dbtmyRAEgZGRmSJEnS4MGDpe7du5ts8+qrr0rR0dEmuW5+TgwGg+Tr6yvNmzfvrpkWLlwoubm5Vbj/1ues/PnR6/XG+yIiIqQOHToYb5eWlkrOzs7SsmXLJEmSpG+//VaKiIiQDAaDcZuioiLJyclJ2rBhw12zERFZE7PNEQkNDYWLi4vxtr+/v3HUIC4uDkVFRejatWulj42Li0Pjxo3h7OxsvK99+/YwGAw4ffo0/Pz8MGTIELRt2xZXrlxBnTp1sHTpUvTu3RseHh4Ayv4iPnv2rMlpE0mSYDAYcP78eURFRQEAWrRoYXLsuz0uPj4eSqXS5HGRkZF3nN8h3RhhuNuE3ri4OCiVSrRu3dp4n5eXFyIiIhAXF2e8T6PRoF69esbbfn5+CA0NNZmb4OfnV2GUJjY21uT9ABATE1PhvtTU1GrNt2jYsKFxRAMo+1wfO3as0m0PHz6MwMBANGjQoNL36/V6fPDBB1i+fDkuX76MoqIiFBUVmXwtVEVcXBz69u1rcl/79u0xe/Zs6PV6Y96bn5PyUy13Gt26Fw0bNoRc/u/go5+fHxo1amS8rVAo4OXlZTxu+dfgzd8/QNloUUJCQo1mIyISrVpFxNXVFVlZFVdEzMzMhKurq8l9Dg4OJrdlMhkMBgOAskl9dyJJ0m1/aZff36pVK9SrVw8//vgjnn/+efz6669YuHChcTuDwYAxY8Zg/PjxFfYRHBxs/P+tv+Du9rjyyYnVuUrIx8cHHh4eJmWiMtJtTonc+nxU9tze6fmu7HHl+6vsvvLHyeXyCpluPkV0pzy3Hrvc3T73H3/8MT755BPMnj0bMTExcHZ2xsSJE6t9SqKyr6HKnt/qZL9X1f18GQwGNG/evNK5R7Ux8ZeIqDZVq4hERkZi3bp1Fe7ft28fIiIiqryf+vXrw8nJCZs2bcKzzz5b4f3R0dFYvHgx8vLyj
EVh586dkMvlJn9JDx48GEuXLkVgYCDkcjn69OljfF+zZs1w4sQJhIeHV+dDvOvjoqKiUFpaiv3796NVq1YAgNOnT99x/Qq5XI5BgwZhyZIleOeddyrME8nLy4NKpUJ0dDRKS0uxZ88etGvXDkDZ5M34+HjjCE5t8vHxQUpKiskv9cOHD9/XPmNjY5GUlIT4+PhKR0W2b9+Ovn37YujQoQDKfimfOXPG5ON3dHQ0mYhbmejoaOzYscPkvn/++QcNGjQwGb2xRM2aNcPy5cvh6+tboeATEdmaak1WfeGFF5CQkICxY8fiyJEjiI+Px5w5c/Dtt9/i1VdfrfJ+1Go1Jk+ejNdeew3fffcdEhISsHv3bnz77bcAgCFDhkCtVmPEiBE4fvw4tmzZghdffBHDhg0znj4o3+7gwYOYPn06BgwYALVabXzf5MmTsWvXLowdOxaHDx/GmTNnsHr1arz44ot3zHa3x0VERKBnz54YNWoU9uzZgwMHDuDZZ5+961/6M2bMQFBQEFq3bo3vvvsOJ0+exJkzZ7BgwQI0adIEubm5qF+/Pvr27YtRo0Zhx44dOHLkCIYOHYqAgIAKpxlqQ+fOnZGWloZZs2YhISEBc+bMqbSIVkenTp3QsWNHPP744/jrr79w/vx5rFu3DuvXrwcAhIeH46+//sI///yDuLg4jBkzBikpKSb7CA0NxZ49e5CYmIhr165VOoLxyiuvYNOmTXj//fcRHx+PxYsX44svvsCkSZPuK39tGDJkCLy9vdG3b19s374d58+fx7Zt2zBhwgQkJSWJjkdEVKOqVURCQ0Oxfft2JCQk4KGHHkLLli2NV0gMHDiwWgeeMmUKXnnlFbz99tuIiorCoEGDjOfINRoNNmzYgPT0dLRs2RIDBgxA165d8cUXX5jso379+mjZsiWOHj1qvFqmXGxsLLZt24YzZ86gQ4cOaNq0KaZMmQJ/f/875qrK4xYuXIigoCB06tQJ/fv3x+jRo+Hr63vH/Xp4eGD37t0YOnQopk2bhqZNm6JDhw5YtmwZPvzwQ+NCXAsXLkTz5s3x8MMPo23btpAkCX/88UeFofzaEBUVhblz52LOnDlo3Lgx9u7dWyO/yH/++We0bNkSTz75JKKjo/Haa68ZRzimTJmCZs2aoUePHujcuTN0Oh369etn8vhJkyZBoVAgOjoaPj4+uHjxYoVjNGvWDCtWrMCPP/6IRo0a4e2338Z7772HkSNH3nd+c9NoNPj7778RHByM/v37IyoqCk8//TQKCgo4QkJENoevNUNERETC8LVmiIiISBgWESIiIhKGRYSIiIiEYREhIiIiYVhEiIiISBgWESIiIhKGRYSIiIiEYREhIiIiYVhEiIiISBgWESIiIhKGRYSIiIiEYREhIiIiYVhEiIiISBgWESIiIhKGRYSIiIiEYREhIiIiYVhEiIiISBgWESIiIhKGRYSIiIiEYREhIiIiYVhEiIiISBgWESIiIhKGRYSIiIiEYREhIiIiYVhEiIiISBgWESIiIhKGRYSIiIiEYREhIiIiYVhEiIiISBgWESIiIhKGRYSIiIiEYREhIiIiYVhEiIiISBgWESIiIhKGRYSIiIiEYREhIiIiYVhEiIiISBgWESIiIhKGRYSIiIiEYREhIiIiYVhEiIiISBgWESIiIhLm/wHI3SQD12oObQAAAABJRU5ErkJggg==\n", - "text/plain": [ - "
" - ] - }, - "metadata": {}, - "output_type": "display_data" - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "The Model E2E Time is 9.352s.\n", - " --Computing Time is 6.273s\n", - " --Uncovered Communication Time is 0.464s\n", - " --Free Time is 2.615s\n" - ] - } - ], - "source": [ - "# 饼图展示计算、通信、空闲耗时的占比\n", - "overall_data = data.get(\"overall_data\", {})\n", - "plt.figure(figsize=(6, 6)) #设置饼图大小\n", - "plt.pie(x=overall_data.values(), labels=overall_data.keys(), explode=[0.01]*len(overall_data), autopct=\"%1.1f%%\")\n", - "plt.title(\"Model Profiling Time Distribution\")\n", - "plt.show()\n", - "print(bottleneck.get(\"overall_data\", \"\"))" - ] - }, - { - "cell_type": "code", - "execution_count": 16, - "id": "6a1d82fb-a31b-49ab-a859-6d4bb898c512", - "metadata": { - "scrolled": true - }, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - " Computing Time Subtype Duration(s) Duration Ratio Kernel Number\n", - "0 Cube Time 3.956 63.06% 584\n", - "1 Vector Time 1.994 31.79% 5224\n", - "\n", - "Computing Time is 6.273s\n", - " if you want more detailed advice please go to compute_perf_analysis.ipynb\n" - ] - } - ], - "source": [ - "# 展示计算细分耗时,NPU开启level1或level2,aic_metric设为PipeUtilization\n", - "compute_time = data.get(\"computing\", {})\n", - "print(pd.DataFrame(compute_time))\n", - "print(\"\\n\", bottleneck.get(\"computing\", \"\"))" - ] - }, - { - "cell_type": "code", - "execution_count": 22, - "id": "35df1f13", - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Empty DataFrame\n", - "Columns: []\n", - "Index: []\n", - "\n" - ] - } - ], - "source": [ - "# 展示通信细分耗时,通信耗时受profiling性能膨胀的影响,以L0 + NPU采集的profiling为准\n", - "communication_time = data.get(\"communication\", {})\n", - "print(pd.DataFrame(communication_time))\n", - "print(\"\\n\", bottleneck.get(\"communication\", \"\"))" - ] - }, - { - "cell_type": "code", - "execution_count": 14, - "id": "c5e6034e", - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - " Free Time Subtype Duration(s) Duration Ratio Kernel Number\n", - "0 SDMA Time 0.073 2.79% 852\n", - "\n", - "Free Time is 2.615s\n", - " if you want more detailed advice please go to timeline_perf_analysis.ipynb\n" - ] - } - ], - "source": [ - "# 展示空闲细分耗时,该耗时受profiling性能膨胀的影响,以L0 + NPU采集的profiling为准\n", - "free_time = data.get(\"free\", {})\n", - "print(pd.DataFrame(free_time))\n", - "print(\"\\n\", bottleneck.get(\"free\", \"\"))" - ] - }, - { - "cell_type": "markdown", - "id": "3511befaff513e8e", - "metadata": { - "jupyter": { - "outputs_hidden": false - } - }, - "source": [ - "## 2)有对标的GPU数据" - ] - }, - { - "cell_type": "code", - "execution_count": 9, - "id": "2a1e617d2a117125", - "metadata": { - "jupyter": { - "outputs_hidden": false - } - }, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "+----------------------------------------------------------------------------------------------------------------+\n", - "| Model Profiling Time Distribution |\n", - "+-----+----------------+------------------+----------------+------------------------------+-----------+----------+\n", - "| | Cube Time(Num) | Vector Time(Num) | Computing Time | Uncovered Communication Time | Free Time | E2E Time |\n", - "+-----+----------------+------------------+----------------+------------------------------+-----------+----------+\n", - "| GPU | 3.149s(582) | 1.346s(3433) | 4.748s | 0.024s | 0.051s | 4.840s |\n", - "| NPU | 
3.956s(584) | 1.994s(5224) | 6.273s | 0.464s | 2.615s | 9.352s |\n", - "+-----+----------------+------------------+----------------+------------------------------+-----------+----------+\n" - ] - } - ], - "source": [ - "# 有可对比的GPU数据情况下,展示比对结果\n", - "from prettytable import PrettyTable\n", - "comparison_result = data.get(\"comparison_result\", {})\n", - "if not comparison_result:\n", - " print(\"Invalid comparison data, you need to set the gpu_profiling_path.\")\n", - "if comparison_result:\n", - " for sheet_name, data in comparison_result.items():\n", - " if data.get(\"rows\", []):\n", - " table = PrettyTable()\n", - " table.title = sheet_name\n", - " table.field_names = data.get(\"headers\", [])\n", - " for row in data.get(\"rows\", []):\n", - " table.add_row(row)\n", - " print(table)\n", - " print(bottleneck.get(\"comparison_result\", \"\"))" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "0d968851", - "metadata": {}, - "outputs": [], - "source": [] - } - ], - "metadata": { - "kernelspec": { - "display_name": "Python 3 (ipykernel)", - "language": "python", - "name": "python3" - }, - "language_info": { - "codemirror_mode": { - "name": "ipython", - "version": 3 - }, - "file_extension": ".py", - "mimetype": "text/x-python", - "name": "python", - "nbconvert_exporter": "python", - "pygments_lexer": "ipython3", - "version": "3.10.9" - } - }, - "nbformat": 4, - "nbformat_minor": 5 -} diff --git a/profiler/advisor/result/__init__.py b/profiler/advisor/result/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/profiler/advisor/result/item.py b/profiler/advisor/result/item.py new file mode 100644 index 0000000000..500d32c987 --- /dev/null +++ b/profiler/advisor/result/item.py @@ -0,0 +1,61 @@ +class OptimizeItem: + + def __init__(self, problem, description, suggestion): + self.problem = problem + self.description = description + self.suggestion = suggestion + + @property + def data(self): + format_suggestions = [] + for index, suggesion in enumerate(self.suggestion): + format_suggestions.append(f"{index + 1}. 
{suggesion}") + suggestion_str = "\n".join(format_suggestions) + return [self.problem, self.description, suggestion_str] + + @property + def headers(self): + return ["problem", "description", "suggestion"] + + +class StatisticsItem: + def __init__(self, total_task_duration, task_duration, count, income=None): + self.total_task_duration = total_task_duration + self.task_duration = task_duration + self.count = count + self.income = income + if not isinstance(task_duration, str): + self.task_duration_ratio = round(task_duration / total_task_duration, 4) if total_task_duration != 0 else 0 + else: + self.task_duration_ratio = "" + + @property + def data(self): + + def _cal_ratio(divisor, dividend): + if divisor and dividend != 0: + return divisor, round(divisor / dividend, 4) + else: + return "", "" + + income, income_ratio = _cal_ratio(self.income, self.total_task_duration) + return [self.count, self.total_task_duration, self.task_duration_ratio, income, income_ratio] + + @property + def headers(self): + return ["problem count", "total_time(us)", "time ratio", "income(us)", "income ratio"] + + +class OptimizeRecord: + + def __init__(self, optimization_item, statistics_item=None) -> None: + self.optimization_item = optimization_item + self.statistics_item = statistics_item or StatisticsItem("", "", "") + + @property + def data(self): + return self.optimization_item.data + self.statistics_item.data + + @property + def headers(self): + return self.optimization_item.headers + self.statistics_item.headers diff --git a/profiler/advisor/result/result.py b/profiler/advisor/result/result.py new file mode 100644 index 0000000000..308db61231 --- /dev/null +++ b/profiler/advisor/result/result.py @@ -0,0 +1,201 @@ +import json +import os +import stat +from textwrap import fill +from collections import OrderedDict + +import click +import xlsxwriter +from prettytable import ALL, PrettyTable + +from profiler.advisor.common import constant as const +from profiler.advisor.utils.utils import singleton, logger +from profiler.advisor.config.config import Config + + +class ResultWriter: + def __init__(self, result_path=None): + self.result_path = result_path + self.workbook = xlsxwriter.Workbook(result_path) + + self.header_format = None + self.data_cell_format = None + self._init_header_format() + self._init_data_cell_format() + + def _init_header_format(self): + self.header_format = self.workbook.add_format({ + "bold": True, + "color": "#FFFFFF", + "bg_color": "#187498", + "align": "center", + "border": 1, + "font_name": "Arial", + }) + + def _init_data_cell_format(self): + self.data_cell_format = self.workbook.add_format({ + "bold": False, + "align": "left", + "valign": "top", + "border": 1, + "font_name": "Arial", + 'text_wrap': True + }) + + def add_data(self, sheet_name, headers, data_list): + sheet = self.workbook.add_worksheet(sheet_name) + + if headers: + for col_index, header in enumerate(headers): + sheet.write(0, col_index, header, self.header_format) + + if data_list: + for i, row_data in enumerate(data_list): + row_index = i + 1 + for col_index, value in enumerate(row_data): + sheet.write(row_index, col_index, value, self.data_cell_format) + + sheet.autofit() + + def save(self): + try: + self.workbook.close() + except Exception as e: + logger.error("Failed to save analysis results, reason is %s", e) + + +@singleton +class SheetRecoder: + + def __init__(self): + self._sheet_data = OrderedDict() + + @property + def sheet_data(self): + return self._sheet_data + + def _init_sheet_name(self, sheet_name): + 
if sheet_name not in self._sheet_data: + self._sheet_data[sheet_name] = {} + + def add_headers(self, sheet_name, headers): + self._init_sheet_name(sheet_name) + + if self._sheet_data[sheet_name].get("headers") is None: + self._sheet_data[sheet_name]["headers"] = headers + + def add_data(self, sheet_name, data): + self._init_sheet_name(sheet_name) + + if not isinstance(self._sheet_data[sheet_name].get("data"), list): + self._sheet_data[sheet_name]["data"] = [] + self._sheet_data[sheet_name]["data"].append(data) + + +@singleton +class OptimizeResult: + + def __init__(self): + self.result_writer = ResultWriter(Config().analysis_result_file) + self.sheet_recorder = SheetRecoder() + self.page_dict = False + self._tune_op_list = [] + + def add_tune_op_list(self, tune_op_list) -> None: + """ + add tune op name to tune op list + :param tune_op_list: tune op name list to be added + :return: None + """ + for op_name in tune_op_list: + if op_name not in self._tune_op_list: + self._tune_op_list.append(op_name) + + def add(self, overview_item): + sheet_name = "problems" + + headers = overview_item.headers + data = overview_item.data + self.sheet_recorder.add_headers(sheet_name, headers) + self.sheet_recorder.add_data(sheet_name, data) + + TerminalResult().add(overview_item.optimization_item.data) + self.page_dict = True + + def add_detail(self, sheet_name, headers=None, detail=None): + if headers: + self.sheet_recorder.add_headers(sheet_name, headers) + if detail: + self.sheet_recorder.add_data(sheet_name, detail) + self.page_dict = True + + def show(self): + for sheet_name, sheet_data in self.sheet_recorder.sheet_data.items(): + self.result_writer.add_data(sheet_name, sheet_data.get("headers"), sheet_data.get("data")) + self.result_writer.save() + self._save_op_file_list() + TerminalResult().print() + + def _save_op_file_list(self) -> None: + if not self._tune_op_list: + return + tune_op_dict = {"tune_ops_name": self._tune_op_list} + tune_ops_file = Config().tune_ops_file + try: + + with os.fdopen(os.open(tune_ops_file, os.O_WRONLY | os.O_CREAT | os.O_TRUNC, stat.S_IWUSR | stat.S_IRUSR), + 'w', encoding="utf-8") as op_tune_file: + json.dump(tune_op_dict, op_tune_file) + except OSError as error: + logger.error("Dump op_list to %s failed, %s", tune_ops_file, error) + return + logger.info("Save tune op name list to %s", tune_ops_file) + + +@singleton +class TerminalResult: + """ + Result output to screen + """ + + def __init__(self): + self.width, _ = self.get_terminal_size() + if self.width is None: + self.table = PrettyTable(["No.", "Problem", "Description", "Suggestion"]) + else: + self.table = PrettyTable(["No.", "Problem", "Description", "Suggestion"], + max_table_width=max(self.width - 20, 180)) + self.table.hrules = ALL + self.result_list = [] + + @staticmethod + def get_terminal_size(): + try: + width, height = os.get_terminal_size() + except OSError: + width, height = None, None + return width, height + + def add(self, result_str): + """ + add a result str + """ + self.result_list.append(result_str) + + def print(self): + """ + print screen result with format table + """ + table_row_cnt = 0 + for result in self.result_list: + table_row_cnt += 1 + result[1] = fill(result[1], width=40) + result[2] = fill(result[2], width=40) + self.table.add_row([table_row_cnt] + result) + self.table.align = "l" + + if table_row_cnt > 0: + click.echo(self.table) + else: + click.echo(click.style(const.SKIP_ANALYZE_PROMPT, fg='red')) diff --git a/profiler/advisor/rules/__init__.py 
b/profiler/advisor/rules/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/profiler/advisor/rules/timeline_fusion_ops.yaml b/profiler/advisor/rules/timeline_fusion_ops.yaml new file mode 100644 index 0000000000..764dd5d50f --- /dev/null +++ b/profiler/advisor/rules/timeline_fusion_ops.yaml @@ -0,0 +1,59 @@ +- cann_version: 6.3.RC2 + torch_version: 1.11.0 + unique_id: 0 + operator_rules: + aten: + add: + torch_npu.npu_confusion_transpose: ["(permute|transpose)-(contiguous){0,1}-(reshape|view)", + "(reshape|view)-(contiguous){0,1}-(permute|transpose)"] + torch_npu.fast_gelu: [gelu] + torch_npu.npu_linear: [linear] + torch_npu.npu_mish: [mish] + torch_npu.contrib.module.Mish: [mish] + torch_npu.npu_scaled_masked_softmax: [ "softmax-(mul){0,1}-(masked_fill_|add)" ] + torch_npu.npu_silu: [ silu, mul-sigmoid, sigmoid-mul ] + torch_npu.contrib.module.SiLU: [ silu, mul-sigmoid, sigmoid-mul ] + optimizer.clip_grad_norm_fused_: [add-reciprocal-mul] + Optimizer: + add: + torch_npu.optim.NpuFusedAdamW: [AdamW.step] + torch_npu.optim.NpuFusedSGD: [SGD.step] + torch_npu.optim.NpuFusedAdadelta: [Adadelta.step] + torch_npu.optim.NpuFusedLamb: [Lamb.step] + torch_npu.optim.NpuFusedAdamP: [AdamP.step] + torch_npu.optim.NpuFusedBertAdam: [BertAdam.step] + torch_npu.optim.NpuFusedRMSprop: [RMSprop.step] + torch_npu.optim.NpuFusedRMSpropTF: [RMSpropTF.step] + torch_npu.optim.NpuFusedAdam: [Adam.step] + + +- cann_version: 7.0.RC1 + torch_version: [1.11.0,2.1.0] + unique_id: 1 + inherit_unique_id: 0 + operator_rules: + aten: + add: + torch_npu.npu_fusion_attention: ["matmul-(add){0,1}-(mul){0,1}-(masked_fill_|add){0,1}-softmax-(dropout){0,1}-matmul"] + torch_npu.npu_rotary_mul: ["(chunk|slice)-neg-cat-(mul){0,2}-add"] + +- cann_version: 7.0.0 + torch_version: [1.11.0, 2.1.0] + unique_id: 2 + inherit_unique_id: 1 + operator_rules: + aten: + add: + torch_npu.npu_rms_norm: ["(pow){0,1}-(mean){0,1}-(add){0,1}-rsqrt-mul-(type_as){0,1}"] + torch_npu.npu_swiglu: [ "(slice|chunk)-silu-mul", "(slice|chunk)-mul-silu", + "(slice|chunk)-sigmoid-mul-mul", "(slice|chunk)-mul-sigmoid-mul", + "(slice|chunk)-mul-mul-sigmoid" ] + +- cann_version: 8.0.0 + torch_version: [1.11.0, 2.1.0] + unique_id: 3 + inherit_unique_id: 2 + operator_rules: + aten: + add: + torch_npu.npu_geglu: ["(slice|chunk)-gelu-mul", "(slice|chunk)-mul-gelu"] \ No newline at end of file diff --git a/profiler/advisor/timeline_perf_analysis.ipynb b/profiler/advisor/timeline_perf_analysis.ipynb deleted file mode 100644 index 34233db6fe..0000000000 --- a/profiler/advisor/timeline_perf_analysis.ipynb +++ /dev/null @@ -1,163 +0,0 @@ -{ - "cells": [ - { - "cell_type": "code", - "execution_count": 1, - "metadata": {}, - "outputs": [], - "source": [ - "from advisor_backend.interface import Interface\n", - "import matplotlib.pyplot as plt" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "# Timeline调优分析\n", - "\n", - "## 1. Timeline分析的数据准备\n", - "我们当前支持Ascend PyTorch Profiler方式采集后的ascend_pt目录,并支持单独分析ascend_pt/ASCEND_PROFILER_OUTPUT目录下的trace_view.json文件。\n", - "\n", - "## 2. 
Timeline分析解决的问题\n", - "当前支持的功能:\n", - "1) 识别当前可选择的NPU亲和优化器。\n", - "2) 分析算子调度瓶颈。" - ] - }, - { - "cell_type": "code", - "execution_count": 3, - "metadata": {}, - "outputs": [], - "source": [ - "# EDIT THE PROFILING DATA PATH\n", - "timeline_path = \"[YOUR PATH]\"\n", - "interface = Interface(timeline_path)" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "### 1)亲和优化器识别" - ] - }, - { - "cell_type": "code", - "execution_count": 4, - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "[INFO] Start to analyse the target file: [YOUR PATH]\n", - "['Optimizer.step#AdamW.step']\n", - "You can choose torch_npu.optim.NpuFusedAdamW to replace the current Optimizer: Optimizer.step#AdamW.step.\n" - ] - } - ], - "source": [ - "dataset = interface.get_data('timeline', 'optimizer')\n", - "# 打印当前使用的优化器\n", - "data = dataset.get('data')\n", - "print(data)\n", - "\n", - "# 如果使用了原生优化器,则打印优化建议\n", - "advice = dataset.get('advice')\n", - "print(advice)" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "### 2)算子调度分析\n" - ] - }, - { - "cell_type": "code", - "execution_count": 5, - "metadata": { - "scrolled": true - }, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "[INFO] Start to analyse the target file: [YOUR PATH]\n" - ] - }, - { - "data": { - "image/png": "iVBORw0KGgoAAAANSUhEUgAABMQAAAK9CAYAAADLzbDJAAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjguMiwgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy8g+/7EAAAACXBIWXMAAA9hAAAPYQGoP6dpAAEAAElEQVR4nOydd7wcVd3/P1tuS09IAgmE3iMdfVRAwOfhJyiKyKOPijyooIAFLKAgKlgwdAULTaSIID5SpEdagCBVCKGkkkp6bm5uv9tmfn/sntlzzpxz5szu7N25u9/365VX7r07O3N2dnd25rOfz+ckXNd1QRAEQRAEQRAEQRAEQRBNQrLeAyAIgiAIgiAIgiAIgiCI4YQEMYIgCIIgCIIgCIIgCKKpIEGMIAiCIAiCIAiCIAiCaCpIECMIgiAIgiAIgiAIgiCaChLECIIgCIIgCIIgCIIgiKaCBDGCIAiCIAiCIAiCIAiiqSBBjCAIgiAIgiAIgiAIgmgqSBAjCIIgCIIgCIIgCIIgmgoSxAiCIAiCIAiCIAiCIIimggQxgiAIgiCIEcqKFSuQSCRw5ZVX1nxbt956KxKJBFasWBH6vnPmzEEikcCcOXMiHxdBEARBEEQlkCBGEARBEETT8/bbb+NLX/oStt9+e7S1tWH69Ok4+eST8fbbb9dkew8++CCOPPJITJ06FaNGjcKuu+6Kz33uc3jsscdqsj2CIAiCIAhChAQxgiAIgiCamnvvvRcHH3wwnnzySXzlK1/BH/7wB5x22ml4+umncfDBB+O+++6LdHtXXnklPvWpTyGRSOCCCy7Ar3/9a5x00klYsmQJ/vrXv0a6LYIgCIIgCEJNut4DIAiCIAiCqBfvvvsuTjnlFOy666549tlnMWXKFO+2c845B0cccQROOeUUzJ8/H7vuumvV28vn8/jFL36BY445Bv/85z99t2/cuLHqbRAEQRAEQRDBkEOMIAiCIIim5YorrsDAwABuvPFGQQwDgMmTJ+OGG25Af38/Lr/8cu/vF198MRKJBBYuXIjPfe5zGDduHLbZZhucc845GBoaMm5v8+bN6OnpwWGHHaa8ferUqcLvQ0NDuPjii7Hnnnuivb0d06ZNw2c+8xm8++67vvveeOON2G233dDW1ob3v//9eOWVV3zLLFy4EP/93/+NSZMmob29HYceeigeeOAB33Jvv/02PvrRj6KjowM77LADfvnLX8JxHN9yiUQCF198se/vO++8M7785S9r9kKZl156CcceeyzGjx+PUaNG4cgjj8Tzzz8feD+CIAiCIIhqIYcYQRAEQRBNy4MPPoidd94ZRxxxhPL2j3zkI9h5553x8MMP+2773Oc+h5133hmzZs3Ciy++iGuvvRZdXV24/fbbtdubOnUqOjo68OCDD+Lb3/42Jk2apF22UCjg+OOPx5NPPonPf/7zOOecc9Db24vHH38cb731FnbbbTdv2TvvvBO9vb0444wzkEgkcPnll+Mzn/kMli1bhpaWFgBFkeuwww7D9ttvj/PPPx+jR4/G3/72N3z605/GPffcgxNPPBEAsH79ehx99NHI5/PecjfeeCM6Ojqs9qktTz31FI477jgccsghuOiii5BMJnHLLbfgox/9KJ577jl84AMfiHR7BEEQBEEQPCSIEQRBEATRlHR3d2Pt2rU44YQTjMvtv//+eOCBB9Db24uxY8d6f99ll13wj3/8AwDwzW9+E+PGjcMf/vAHnHvuudh///2V60omkzjvvPPw85//HDvuuCM+8pGP4PDDD8exxx6Lgw8+WFj29ttvx5NPPomrr74a3/3ud72/n3/++XBdV1h21apVWLJkCSZOnAgA2GuvvXDCCSdg9uzZOP744wEUI6A77rgjXnnlFbS1tQEAvvGNb+Dwww/HD3/4Q08Qu+yyy7Bp0ya89NJLnih16qmnYo899jDv0BC4roszzzwTRx99NB599FEkEgkAwBlnnIGZM2fixz/+sTJSShAEQRAEERUUmSQIgiAIoinp7e0FAEHkUsFu7+npEf7+zW9+U/j929/+NgDgkUceMa7vZz/7Ge68804cdNBBmD17Ni688EIccsghOPjgg7FgwQJvuXvuuQeTJ0/21svDBCTG//zP/3hiGADP8bZs2TIAwJYtW/DUU0/hc5/7HHp7e7F582
Zs3rwZnZ2d+NjHPoYlS5ZgzZo13vg/+MEPCg6tKVOm4OSTTzY+rjDMmzcPS5YswRe/+EV0dnZ64+nv78d//ud/4tlnn1VGNAmCIAiCIKKCHGIEQRAEQTQlTOhiwpgOnXAmO6Z22203JJNJrFixInDbX/jCF/CFL3wBPT09eOmll3DrrbfizjvvxCc/+Um89dZbaG9vx7vvvou99toL6XTw6dqOO+4o/M7Esa6uLgDA0qVL4boufvKTn+AnP/mJch0bN27E9ttvj5UrV+I//uM/fLfvtddegeOwZcmSJQCKzjMd3d3dgshHEARBEAQRJSSIEQRBEATRlIwfPx7Tpk3D/PnzjcvNnz8f22+/PcaNG2dcTnZt2TBu3Dgcc8wxOOaYY9DS0oLbbrsNL730Eo488shQ60mlUsq/s2glc1ude+65+NjHPqZcdvfddw+1TROFQsF4OxvPFVdcgQMPPFC5zJgxYyIbD0EQBEEQhAwJYgRBEARBNC3HH388brrpJsydOxeHH3647/bnnnsOK1aswBlnnOG7bcmSJdhll12835cuXQrHcbDzzjtXNJZDDz0Ut912G9atWweg6Dh76aWXkMvlvGL8Stl1110BAC0tLfiv//ov47I77bST5+DiWbRoke9vEydOxNatW4W/ZbNZ7zHoYBMCjBs3LnA8BEEQBEEQtYA6xAiCIAiCaFrOO+88dHR04IwzzkBnZ6dw25YtW3DmmWdi1KhROO+883z3/f3vfy/8/tvf/hYAcNxxx2m3NzAwgBdeeEF526OPPgqgHE086aSTsHnzZvzud7/zLSuX6gcxdepUHHXUUbjhhhuUYtWmTZu8nz/+8Y/jxRdfxMsvvyzc/pe//MV3v9122w3PPvus8Lcbb7wx0CF2yCGHYLfddsOVV16Jvr4+43gIgiAIgiBqATnECIIgCIJoWvbYYw/cdtttOPnkk7HffvvhtNNOwy677IIVK1bg5ptvxubNm3HXXXd5jiae5cuX41Of+hSOPfZYvPDCC7jjjjvwxS9+EQcccIB2ewMDA/jwhz+MD37wgzj22GMxY8YMbN26Fffffz+ee+45fPrTn8ZBBx0EAPjf//1f3H777fje976Hl19+GUcccQT6+/vxxBNP4Bvf+Ebg7Jgyv//973H44Ydjv/32w9e+9jXsuuuu2LBhA1544QW89957eOONNwAAP/jBD/DnP/8Zxx57LM455xyMHj0aN954I3baaSdfvPT000/HmWeeiZNOOgnHHHMM3njjDcyePRuTJ082jiWZTOKPf/wjjjvuOMycORNf+cpXsP3222PNmjV4+umnMW7cODz44IOhHh9BEARBEEQYSBAjCIIgCKKp+exnP4u9994bs2bN8kSwbbbZBkcffTR+9KMf4X3ve5/yfnfffTd++tOf4vzzz0c6nca3vvUtXHHFFcZtTZgwATfddBMefvhh3HLLLVi/fj1SqRT22msvXHHFFTj77LO9ZVOpFB555BFccskluPPOO3HPPfdgm2228UStsOy777549dVX8bOf/Qy33norOjs7MXXqVBx00EH46U9/6i03bdo0PP300/j2t7+NSy+9FNtssw3OPPNMTJ8+Haeddpqwzq997WtYvnw5br75Zjz22GM44ogj8Pjjj+M///M/A8dz1FFH4YUXXsAvfvEL/O53v0NfXx+22247/Md//IcyokoQBEEQBBElCTes554gCIIgCKKJufjii/Gzn/0MmzZtCnRCEQRBEARBEPGEOsQIgiAIgiAIgiAIgiCIpoIEMYIgCIIgCIIgCIIgCKKpIEGMIAiCIAiCIAiCIAiCaCqoQ4wgCIIgCIIgCIIgCIJoKurqECsUCvjJT36CXXbZBR0dHdhtt93wi1/8AqTREQRBEARBEARBEARBELUiXc+NX3bZZbjuuutw2223YebMmXj11Vfxla98BePHjxemHScIgiAIgiAIgiAIgiCIqKhrZPL444/Htttui5tvvtn720knnYSOjg7ccccdgfd3HAdr167F2LFjkUgkajlUgiAIgiAIgiAIgiAIIua4rove3l5Mnz4dyaQ+GFlXh9iHP/xh3HjjjVi8eDH23HNPvPHGG5g7dy6uvvpq5fKZTAaZTMb7fc2aNdh3332Ha7gEQRAEQRAEQRAEQRDECGD16tXYYYcdtLfXVRA7//zz0dPTg7333hupVAqFQgGXXHIJTj75ZOXys2bNws9+9jPf31evXo1x48bVergEQRAEQRAEQRAEQRBEjOnp6cGMGTMwduxY43J1jUz+9a9/xXnnnYcrrrgCM2fOxLx58/Cd73wHV199NU499VTf8rJDjD3I7u5uEsQIgiAIgiAIgiAIgiCanJ6eHowfPz5QK6qrIDZjxgycf/75+OY3v+n97Ze//CXuuOMOLFy4MPD+tg+SIAiCIAiCIAiCIAiCaHxstSJ9u9gwMDAw4Cs4S6VScBynTiMiCIIgCIIgCIIgCIIgGp26doh98pOfxCWXXIIdd9wRM2fOxOuvv46rr74aX/3qV+s5LIIgCIIgCIIgCIIgCKKBqWtksre3Fz/5yU9w3333YePGjZg+fTq+8IUv4Kc//SlaW1sD70+RSYIgCIIgCIIgCIJoLlzXRT6fR6FQqPdQiDqQSqWQTqeRSCSUt4+IDrFqIUGMIAiCIAiCIAiCIJqHbDaLdevWYWBgoN5DIerIqFGjMG3aNKWZylYrqmtkkiAIgiAIgiAIgiAIwgbHcbB8+XKkUilMnz4dra2tWpcQ0Zi4rotsNotNmzZh+fLl2GOPPXzd9LaQIEYQBEEQBEEQBEEQROzJZrNwHAczZszAqFGj6j0cok50dHSgpaUFK1euRDabRXt7e0XrqesskwRBEARBEARBEARBEGGo1BFENA5RvAboVUQQBEEQBEEQBEEQBEE0FSSIEQRBEARBEARBEARBEE0FCWIEQRAEQRAEQRAEQRANQiKRwP3331+37V988cU48MAD67Z9W0gQIwiCIAiCIAiCIAiCIEKjEt/OPfdcPPnkk/UZUAholkmCIAiCIAiCIAiCIAgCAFAoFJBIJCourh8zZgzGjBkT8aiihxxiBEEQBEEQBEEQBEGMTFwX6O+vzz/XtR5mJpPB2WefjalTp6K9vR2HH344XnnlFe/2OXPmIJFI4OGHH8b++++P9vZ2fPCDH8Rbb71lXO+SJUvwkY98BO3t7dh3333x+OOPC7ez9W7dutX727x585BIJLBixQoAwK233ooJEybggQcewL777ou2tjasWrUKr7zyCo455hhMnjwZ48ePx5FHHonXXnvNW8/OO+8MADjxxBORSCS83+XIpOM4+PnPf44ddtgBbW1tOPDAA/HYY495t69YsQKJRAL33nsvjj76aIwaNQoHHHAAXnjhBev9WwkkiBEEQRAEQRAEQRAEMTIZGADGjKnPv4EB62H+4Ac/wD333IPbbrsNr732GnbffXd87GMfw5YtW4TlzjvvPFx11VV45ZVXMGXKFHzyk59ELpdTrtNxHHzmM59Ba2srXnrpJVx//fX44Q9/WOFuHMBll
12GP/7xj3j77bcxdepU9Pb24tRTT8XcuXPx4osvYo899sDHP/5x9Pb2AoAn6N1yyy1Yt26dIPDxXHPNNbjqqqtw5ZVXYv78+fjYxz6GT33qU1iyZImw3IUXXohzzz0X8+bNw5577okvfOELyOfzFT0eG0gQIwiCIAiCIAiCIAiCqBH9/f247rrrcMUVV+C4447Dvvvui5tuugkdHR24+eabhWUvuugiHHPMMdhvv/1w2223YcOGDbjvvvuU633iiSewcOFC3H777TjggAPwkY98BL/61a8qGmMul8Mf/vAHfPjDH8Zee+2FUaNG4aMf/Si+9KUvYe+998Y+++yDG2+8EQMDA3jmmWcAAFOmTAEATJgwAdttt533u8yVV16JH/7wh/j85z+PvfbaC5dddhkOPPBA/OY3vxGWO/fcc/GJT3wCe+65J372s59h5cqVWLp0aUWPxwbqECMIgiAIgiAIgiAIYmQyahTQ11e/bVvw7rvvIpfL4bDDDvP+1tLSgg984ANYsGCBsOyHPvQh7+dJkyZhr7328i3DWLBgAWbMmIHp06cr7x+G1tZW7L///sLfNmzYgB//+MeYM2cONm7ciEKhgIGBAaxatcp6vT09PVi7dq3w2AHgsMMOwxtvvCH8jd/+tGnTAAAbN27E3nvvHfbhWEGCGEEQBEEQBEEQBEEQI5NEAhg9ut6jiC2sGN/l+s5UEcyOjg4kEgnhb6eeeio6OztxzTXXYKeddkJbWxs+9KEPIZvN1mSsLS0t3s9sLI7j1GRbAEUmCYIgCIIgCIIgCIIgasZuu+2G1tZWPP/8897fcrkcXnnlFey7777Csi+++KL3c1dXFxYvXox99tlHud599tkHq1evxrp165T3B8qxRn6ZefPmWY37+eefx9lnn42Pf/zjmDlzJtra2rB582ZhmZaWFhQKBe06xo0bh+nTpwuPna1bfuzDDTnECIIgCIIgCIIgCIIgasTo0aNx1lln4bzzzsOkSZOw44474vLLL8fAwABOO+00Ydmf//zn2GabbbDtttviwgsvxOTJk/HpT39aud7/+q//wp577olTTz0VV1xxBXp6enDhhRcKy+y+++6YMWMGLr74YlxyySVYvHgxrrrqKqtx77HHHvjzn/+MQw89FD09PTjvvPPQ0dEhLLPzzjvjySefxGGHHYa2tjZMnDjRt57zzjsPF110EXbbbTcceOCBuOWWWzBv3jz85S9/sRpHrSCHGEEQBEEQBEEQBEEQRA259NJLcdJJJ+GUU07BwQcfjKVLl2L27Nk+AenSSy/FOeecg0MOOQTr16/Hgw8+iNbWVuU6k8kk7rvvPgwODuIDH/gATj/9dFxyySXCMi0tLbjrrruwcOFC7L///rjsssvwy1/+0mrMN998M7q6unDwwQfjlFNOwdlnn42pU6cKy1x11VV4/PHHMWPGDBx00EHK9Zx99tn43ve+h+9///vYb7/98Nhjj+GBBx7AHnvsYTWOWpFw+SDpCKOnpwfjx49Hd3c3xo0bV+/hEARBEARBEARBEARRI4aGhrB8+XLssssuaG9vr/dwImXOnDk4+uij0dXVhQkTJtR7OLHH9Fqw1YrIIUYQBEEQBEEQBEEQBEE0FSSIEQRhx+uvA1u31nsUBEEQBEEQBEEQBFE1JIgRBGPLFuDSS4HVq+s9kuh54w1g1arK7z9/PnDwwcD//m90YyIIgiAIgiAIgiAAAEcddRRc16W45DBCghhBMG69FbjgAuDXvxb/PjQErF1blyFFQmcn8P73A//v/1W+DiamNaJYSBAEQRAEQRAEQTQdJIgRBKOnp/h/X5/49099Cthpp5Erim3cCORywLp1la+jUCj+P3Ln4CAIgiAIgiAIgiAIDxLECIKRzxf/dxzx70uWFG9buXL4xxQFTMySH1cY2L4hQYwgCIIgCIIgCIJoAEgQIwiGThBjv49UMUgnZi1aBHzxi8DbbwevIwpRjSAIgiAIgiAIgiBiAgliBMHQxQKZCDRSxSCdmHXHHcBddxW702zXMVJFQYIgCIIgCIIgCILgIEGMIBiN7hCTH1c2K/5vs46Rug8IgiAIgiAIgiAIgoMEMYJg6JxUjeoQCyP0kUOMIAiCIAiCIAii4UkkErj//vvrPYxhgQQxgmAEOcRGqiCmc3eFeVy6fUMQBEEQBEEQBEE0DOvWrcNxxx0HAFixYgUSiQTmzZtX30HViHS9B0AQsUHnghrp7qgonG8jfR8QBEEQBEEQBEEQgWy33Xb1HsKwQQ4xgmA0ukNMHn8YkYs6xAiCIAiCiAsLFhQnBbr0UmDZMvG22bOB004DFi6sy9AIghh+XNdFf7a/Lv/cENdHmUwGZ599NqZOnYr29nYcfvjheOWVV7zb58yZg0QigYcffhj7778/2tvb8cEPfhBvvfWWdp3nnnsujj/+eO/33/zmN0gkEnjssce8v+2+++744x//CAB45ZVXcMwxx2Dy5MkYP348jjzySLz22mvCOvnI5C677AIAOOigg5BIJHDUUUdZP96RADnECILRqKX6TPgCio8hkSj+XIlDbKSKggRBEARBNAaLFwMzZ5bPy15/Hbj77vLtv/oV8Oyzxdm0f/Yz4Ic/LJ/7EATRkAzkBjBm1pi6bLvvgj6Mbh1ttewPfvAD3HPPPbjtttuw00474fLLL8fHPvYxLF26FJMmTfKWO++883DNNddgu+22w49+9CN88pOfxOLFi9HS0uJb55FHHok//vGPKBQKSKVSeOaZZzB58mTMmTMHxx57LNasWYN3333XE7J6e3tx6qmn4re//S1c18VVV12Fj3/841iyZAnGjh3rW//LL7+MD3zgA3jiiScwc+ZMtLa2VrajYgo5xAiCoXNMNYpDDBAfWxihjxxiBEEQBEHEgeXLxfOR5cvF2wcGiv9ns8AFFwCLFg3f2AiCIDT09/fjuuuuwxVXXIHjjjsO++67L2666SZ0dHTg5ptvFpa96KKLcMwxx2C//fbDbbfdhg0bNuC+++5TrveII45Ab28vXn/9dbiui2effRbf//73MWfOHABF19n222+P3XffHQDw0Y9+FF/60pew9957Y5999sGNN96IgYEBPPPMM8r1T5kyBQCwzTbbYLvtthOEu0aAHGIEwWhUhxgviDkOkEyWf+b/N9HsHWI9PcUIxsc/Doy2+waIIAiCIIgKuf124Oabgb//HShdjHnw5zUAsH69+HsuJ/7OBDKCIBqWUS2j0HdBX922bcO7776LXC6Hww47zPtbS0sLPvCBD2DBggXCsh/60Ie8nydNmoS99trLtwxjwoQJOOCAAzBnzhy0traitbUVX//613HRRRehr68PzzzzDI488khv+Q0bNuDHP/4x5syZg40bN6JQKGBgYACrVq0K87AbBhLECIIRRfl8HOEjk/xjqGSWyWYVxH79a+Dii4v/f+c79R4NQTQU3UPdaEm1WJ9QEgTRBPzpT8XY4zPPAP/93+JtTPDaYQfgvfeADRvESghZEGvWcxeCaCISiYR1bLEROeqoozBnzhy0tbXhyCOPxKRJk7DP
Pvtg7ty5eOaZZ/D973/fW/bUU09FZ2cnrrnmGuy0005oa2vDhz70IWSz2To+gvpBkUmCYOhEn5EuiEURmWz2DrFNm4r/b9xY33EQRIORLWSx1+/2wiE3HlLvoRAEESfYeYcsbvF/22GH4v/ZLLB1q/92BgliBEHEgN122w2tra14/vnnvb/lcjm88sor2HfffYVlX3zxRe/nrq4uLF68GPvss4923UceeSTmzp2LJ5980usKO+qoo3DXXXdh8eLFQhH+888/j7PPPhsf//jHMXPmTLS1tWHz5s3adbPOsAJvsmggyCFGEIwgh9hIPaEih1j1sMfdoB8EBFEvtgxuwYb+DdjQvwGu6yJBxdcEQQDlz105HgmUBa8xY4AJE4pi2Pr1wMSJ4u2MZv0yjyCIWDF69GicddZZOO+88zBp0iTsuOOOuPzyyzEwMIDTTjtNWPbnP/85ttlmG2y77ba48MILMXnyZHz605/WrvsjH/kIent78dBDD+HSSy8FUBTE/vu//xvTpk3Dnnvu6S27xx574M9//jMOPfRQ9PT04LzzzkNHR4d23VOnTkVHRwcee+wx7LDDDmhvb8f48eOr2xkxghxiBMEI6hAbqSdUcoeY/HMYh1izC2KqE3OCICrGccvHJBdNenwhCMKPjSCWTgPbbVf8me8Rk+/TrOcuBEHEjksvvRQnnXQSTjnlFBx88MFYunQpZs+ejYlM0OeWO+ecc3DIIYdg/fr1ePDBB42zO06cOBH77bcfpkyZgr333htAUSRzHEfoDwOAm2++GV1dXTj44INxyimn4Oyzz8bUqVO1606n07j22mtxww03YPr06TjhhBOq2APxgxxiBMEImmVypJ5Q8a4mVWSSHGLBkEOMIGoCL4gVnAKSKfqejiAI2AliLS1FQWzhQlEQo8gkQRAxpb29Hddeey2uvfZa43KHH3443nrrrVDrnjdvnvD7pEmT4Ciu8w466CC88sorwt/+W+pqdKXj5umnn47TTz891HhGCnTmSRAMlUPMdcsnUo3mEAvTC9bsHWIkiBFETSg45fcUL44RBNHkmD53eUFs222LP2/Y4L9dXhdBEARBSJAgRhAMnSCm+nkkEdQhZvO4yCFW/J8ikwQRKbwIRoIYQRAeYRxigNohxjoJm/XchSAIggiEBDGCYKhcUDoxaSQRNMtkGIdYs55UkkOMIGoCCWIEQSgxCWLsb0GCGOvbGannbzHnnU3v4I75d/iiVQRBVM5RRx0F13UxYcKEeg+laaAOMYJgqFxQKkfVSINmmaweEsQIoiaQIEYQhBJbh5gpMtnaCmQyzXvuUmO+/uDX8fzq57H35L1x6PRD6z0cgiCIiiCHGEEwVA4xXXxyJBHlLJMjVRSsFopMEkRNEEr1XRKcCYIoUWlk0nXL5yzMITZSz99izpbBLQCArsGuOo+EaFbInUhE8RogQYwYHhYtAq6+GhgcrPdI9Kg6xMI6xFwXWLcu2nFVCznEqoccYgRRE3gRjBxiBEF4VCqI8YX6bW3iuohIYcdsOnYTw01LSwsAYGBgoM4jIeoNew2w10QlUGSSGB5+8hPg//4P2HFHQJrWNTaoerLCOsQuuwy44ALg/vuBE06IdHgVE9QhFsYh1qwnlSSIEURNoMgkQRBKbASxdLocmdy0qfgZzQti7AKpWc9dagwJYkS9SKVSmDBhAjZu3AgAGDVqFBJsEg2iKXBdFwMDA9i4cSMmTJiAVCpV8bpIECOGh66Snbq3t77jMBGFQ+ydd4r/L1wYT0GsWocYRSbrOw6CaDBIECMIQomtQ2zKlOJskoUC0NlZjkkCVKpfY5jDl+LuRD3YruQOZaIY0ZxMmDDBey1UCglixPAwEgSVIEHM5htGdpIWp28jo4hMkkOs+D85xAgiUkgQIwhCia0g1tICTJ5cdIitXw9Mm1ZejhxiNYUcYkQ9SSQSmDZtGqZOnYoc7wwlmoaWlpaqnGEMEsSI4YEdqOIsiAVFJkeqk0oXmQwjclGHWPF/EsQIIlKEUn2H3l8EQZSwFcSAYo8YE8QmTy7+LZkE2IVSs5671JhqBLGCU0BPpgcTOyZGPSyiyUilUpGIIkTzQqX6xPAQR+eUTBQOsTgKYuQQqx6KTBJETeBFMHIZEAThEUYQYz1iGzaUl29pKYpi/LqISGHH70qO3V+894uYdtU0vNfzXtTDIgiCCAUJYsTwEEehSIaJPtV0iMXRSUUdYtVDDjGCqAkUmSQIQon8uXv++cBZZxV/5kUvQJxpkhfLWMl2nM7JGgh2zK7E3fvG+jeQKWSwpHNJ1MMiCIIIBQlixPAwEiKTUZTqx/Fx8iIOzTJZGeQQI4iaQIIYQRBK+M/dfL44i/f11xeL83UOMZ0gFqdzsgaiqshkqYjfRZOeVxIEERtIECOGhzg6p2RUoo9OTNIRRydVlA6xOD9/tYQcYgRRE4QOMZqpjCAIBi+I8YXZQ0Pl39OlKuSxY4v/DwyQQ2wYqbZDDABcem4IgqgzJIgRw0McnVMyUTjE4igcBXWIkUMsGBLECKImkEOMIAglskOMkc36HWKtrerbSBCrKexLjEq+zCCHGEEQcYEEMWJ4aARBzOaEKo6PkzrEqocikwRRE0gQIwhCic4hFkYQo1L9mkIOMYIgGgESxIjhYSQIKioXVKUOsTg9zqAOMZplMhhyiBFETeCdBSSIEQThQQ6x2FONIJZ3is8pOcQIgqg3JIgRwwM7QYnzSUkUDrE4RiaDHGIj9XENJySIEURNIIcYQRBKwjjE2tqK/2cyVKo/jDCXV1Wl+s16XkkQRGwgQYwYHuIYJZRhYkcUHWJxepy6DjHV4w1aR5we13BCkUmCqAlCqb5DgjNBECXIIRZ72PG7kmO3F5kkhxhBEHWGBDFieIijUMTjumrHVFhBLI5OOP5EstLIJDnEiv+TQ4wgIoUcYgRBKImiQ4wEsZpSVYcYOcQIgogJJIgRw0MchSKeoJkYgXDRwjgJf1FEJqlDrPg/CWIEESkkiBEEocTkEGO/p9PF/1WCWDpNpfo1JpJSfXKIEQRRZ0gQI4aHuEcmg0Qj+eeg9cTpcQaJfWEeF9CcJ5YUmSSImsBHbUgQIwjCg/8iyrZDjBxiwwpzeZFDjCCIkQwJYkTtcd34d1AFzcQo/11HHKOFUTrEbJdvNMghRhA1gRxiBEEoqaRDjEr1hxWvQ8ylDjGCIEYuJIgRtWckuIuicojF0QkXJPaRQywYEsQIoiYIpfoVXFQRBNGgVNohxs5XyCFWU3hnVyVfZuSdvG89BEEQ9YAEMaL26MSmOGEjiDVqh1iYWSaB5jyxpMgkQdQEcogRBKGESvVjDf8FRthjt+u6njOMHGIEQdSbugpiO++8MxKJhO/fN7/5zXoOi4ga/kQmTkIRj65nS/d3HXGMTAZ1iIUR+myXbzTIIUYQNYEEMYIglISJTOo6xKhUv2YI7l4n3LkRL6aRQ4wgiHqTrufGX3nlFRS4C8y33noLxxxzDD772c/WcVRE5PCCWEQffK7r4tN3fxrTxkzD9cdfX/0KdYJPoznEKo1MhhU
GGw0SxAiiJlTjMiAIooGpxCGm6xAj0SVyqvkygxfQyCFGEES9qasgNmXKFOH3Sy+9FLvtthuOPPJI5fKZTAaZTMb7vaenp6bjIyKiBpHJdX3r8MCiB5BAIhpBLMhFJf+sI+4dYir3GznEgqHIJEHUhGpcBgRBNDCVlOpTZHLYqEoQI4cYQRAxIjYdYtlsFnfccQe++tWvIsE+wCRmzZqF8ePHe/9mzJgxzKMkKqIGkUl+dppIPkyjKtWPY2SSOsSqhxxiBFETKDJJEIQSk0OMndekS9/rBwlicfqSskHgv8AghxhBECOZ2Ahi999/P7Zu3Yovf/nL2mUuuOACdHd3e/9Wr149fAMkKqcGkcnIYzZBMzHKf1fhuuX1xOnki2aZrB5yiBFETSBBjCAIJTqHmByLBPQdYuQQqxnVzBBMDjGCIOJEXSOTPDfffDOOO+44TJ8+XbtMW1sb2tiHHjFyqEFkkv92qeAWkEKquhVG4RCLq4sq6LHZjJU6xIr/k0OMICKFBDGCIJRU0iGWyxVvZ7dRqX7NqObYnXfK56XkECMIot7EQhBbuXIlnnjiCdx77731HgpRC2oQmYz8IspGEAs6oYrrbJpB/WjkEAuGBDGCqAnVxG4IgmhgTA4x9rssiAFAf3/5NnKI1YzISvXpuSEIos7EIjJ5yy23YOrUqfjEJz5R76GMaP73vv/F1x/8er2H4acWHWKc3TqSImabyGTQ2GvghIsE6hCrHopMEkRNqCZ2QxBEA8N/EcWfRw4MlH8mQaxuVFNdIkQmySFGEESdqbsg5jgObrnlFpx66qlIp2NhWBuRbB3aij/P/zNueu0m9GX76j0ckRq4i2LpEIuri0o3rjCRybiKfcOFTiQlCKIqKDJJEIQSnUOMCV6AWRBLp6lUv4ZUM0MwOcQIgogTdRfEnnjiCaxatQpf/epX6z2UEQ2fx986tLV+A1FRw1kmgYhcBUGxQvlnFXEVjaqNTDqOKAg148kL/5gpNkkQkUGCGEEQSnQdYipBLJUq/uNvJ4dYTakqMkkOMYIgYkTdLVn/7//9P/p2IAJ4gWjr0FbsMG6HOo5Gohal+lHPMhnkopJ/VhHXDrFqS/VlAagZ36/8Y87nyyfhBEFUBQliBEEosXGI8cmS1lZgcBDoK6UkqFS/plTT/0gOMYIg4kTdHWJENPACUfdQdx1HooAXimoQmYy8Q4wXjcJ0Z8U1Mql7DOzvYWbPlNfRLJBDjCBqQuRfbhAE0RgEOcQSibIrDCjHJlnHGDnEago5xAiCaBRIEGsQZIdYrBgJkUmbDrGRGpmstlRfLpKP02MbLkgQI4iaEPmXGwRBNAY6hxgvePEwQYwik8NCNROikEOMIIg4QYJYg8B/GMVOEKuBUKT7Zqon04M/vPIHrO9bH26FUZfqx0k0CuoQo8hkMHJkkiCISKDIJEEQSoIcYrIg1tbmv51K9WtGNcduvveYHGIEQdQbEsQahBHjEItITOEFQP6x3zrvVnzzkW9i1nOzQq5QEyustEMsTqJR1A6xOD224YIcYgRRE0gQIwhCCfvcdRwgmy3/XSeIkUNsWKkm7i5EJum5IQiizpAg1iAIHWKZGHeI1SAyyX8Qb+jbAABY27c23AqbxSGmEvuoQywYEsQIoiaQIEYQhBL+c3doqPxzkCDGl+qTIFYzwsTdc4Wc8LsQmSSHGEEQdYYEsQYh1g6xGkcmeTGwP1c8UQo9sUBQrFD+WUVcBbFqZ5mkDjGKTBJEjaimh4YgiCZhcLD8cxiHGM0yWTNsv8x4evnTGHfpONzw6g3e38ghRhBEnCBBrEGIdYfYMEYm+7LFbwZDu+R0M0RW6hCL0wd8kNhHDrFgyCFGEDVB5/YlCKLJ0TnEdKX6rEOMfUaTQ6ym2B67566ai6H8EOaunqu8LznECIKoNySINQixdojVIDKp+2YqEoeY64rdFd6GQnSIxclFFST2Ual+MOQQI4iaQJFJgiCU6AQxRjot/s4cYgwq1a8ptsfunkwPADE2SQ4xgiDiBAliDUKsO8RqECXkBUAhMpktCWLVOMSAygSxODrEXDc4Jkml+sGQQ4wgagIJYgRBKAkSxHSRSf52cojVDNu4OxPE+JklySFGEEScIEGsQRgxDrEaRCYjcYjZCGIjsVRfFm/YuHQxyjDraCZIECOImkCCGEEQSvjPXb5DjEGCWF2xdohlSw4xp3wtwItj5BAjCKLekCDWIMS6Q6zWpfqKDrHB/CCyhazvflp0os9IL9WPWuizWb4RocgkQdSEMDOVEQTRRIR1iLEOMUY6TaX6NUT3xbSM0iHmkkOMIIj4QIJYg8BfSIR2R9WaGnRr6co8WWQSCLkfdDMphhGOauCEq5oohD7qECOHGEHUCNuLKoIgmgvXddDVXvqFHGKxo6oOMYc6xAiCiA8kiDUIsXaI1UAo0nUXsMgkELJHTCf6hIkWjgSHWLXON5vlGxESxAiiJlBkkiAIFd/6ryym/AB4cyqq7xBrxvOWGlNVhxh9EUIQRIwgQaxB4D9QMoUMhvKKk4d6UYtSfe7DlP+mqa4OsWEQxFzXxV/m/wVvbXzL7g42DjGaZTIYikwSRE0gQYwgCBVvTHFQSAILpoAcYjFEl9SQ6c30AhA7xKhUnyCIOEGCWIMgd6/EyiWWy2H5BOCmg4GMmwtc3AZtZDIqh1i1TqoanXz96rlf4Uv3fQmf/b/P2t0h6tkz+XU0E+QQI4iaQIIYQRAqCqUrlEICaodYOi3+LneIkSBWU8JGJrUdYvTcEA2G4zr43uzv4a9v/bXeQyEsIUGsQZDtyrHqEcvlcO7/A77+KeDh0WsjWaXKqu24DgZyA97fI3eIBQlHNehKE4bkOvjx0z8GACzcvNDuTuQQiwYSxAiiJtjGbgiCaC4KJS2rkETlDjEq1a8ZNhOiuK4b3CFGDjGiwXht3Wv49Yu/xo+e/FG9h0JYQoJYgxBrh1g+j6WTij92JUPM/GhA1T/Ai2FARB1iMYpMPrT4Ie/nHcbtYHcn6hCLBopMEkRNsI3dEATRXBQSxc9dR+cQo8hkXbFxiGUKGS8qyTvE+J/JIUY0Gp0DnQAQr/oiwggJYg2C/M16rASxXA5rxxZ/LCCaCx7VN1N8fxgQch9ELRxF/AHvui5mzZ3l/T6ubZzdHW2EPpplMhhyiBFETaDIJEEQKjyHWBSCWDN+kVdjbIrxmTsMkDrEXL1DLFfIkUhGjGiYIYMXfol4Q4JYgxBnh1gmN4TNo4s/RxWJUbkK+P4woLFK9d/reQ8vvvciNxTLk4WoH5fN8o0ICWIEURNIECMIQoXXIWYbmaQOsWHF5tjNC2JCh5ij7hDbMrgF21+9Pf73/v+NcqgEMaywa3ASxEYOJIg1CL4OsTBxwRqzzi1/IBYi6goQZpl01Q6xyCOTYTrEIj75kuOg1p0Lug4x/u/kEAuGIpMEURNsemgIgmg+BIeY6ryDIpN1xab/UXCIFYIdYvM3zMemgU14ZsUzUQ6VIIYVZsggQWzkQIJYgxBnh9hatyxMscik4zrCh2NYVBdRfdk+YZlGco
jJJxuROsSKKwy/jmaCHGIEURPIIUYQhAqhVF8FlerXFZv+x7AOsa7BLgBAthBN3/Bw4rgOHl3yKDYPbK73UIg6wwwZfEyYiDckiDUIce4QW4uyUMU+NI/58zHY47d7YDCnsMFbYBWZrMYhVm2HWNSCmCR4WjvEdHHHMIIYOcRIECOIGmHTQ0MQRPPhRSYTmgXSafF3cogNK2EjkzYdYuzaZSQKYo8tfQwfv/Pj+O7s79Z7KESdsY1MrulZQ27ImECCWIMQZ4fYmkSv9zOLTM5dNRcru1di6ZalFa1TZdWuKjJpIxyFcYhFfPJVsUPMRuhT/c5DHWIUmSSIGkEOMYIgVIR2iJk6xJrR2V5jatEh1jU0ch1iK7auAACs611X34HUmWVdy/C3t//W1J/nfKm+6Xrti/d+EUfddhTe2fTOMI2M0EGCWIMQ5w6xtcmyUMUik+zDcEP/horWqXIVVFWqH0XXFt8hFleHWCWRSd2+aSbIIUYQNYEEMYIgfLhusEMsKDKZTpNDrIbY9D+G7RCLMjI5mBvEXW/ehS2DW6pelw2stqXZY3JnPHQG/ufv/4N/rf5X1eta37ceG/s3RjCq4YU3pZjOa5Z3LQcArO1dW+shEQGQINYgxNkhtjZVLoQvlA4M7MNwfd/6itbJP172M3OITeqYBCDkPrARjuoZmYzKIaaLTJJDzAw5xAiiJtgUMxME0WS4bjQdYiSI1QybuLvOIcb/rHKI5Zyc/XmuhptfvxlfvPeLmPXcrKrWY0tvppiGafYideaU29S/qar1rNy6EtOumoaDbjjIuE9ffO9FfO2Br1W9PQD4/cu/x9mPnm187f1j4T8w7appeGr5U9pleEOGaey9WXrNxAUSxBqEOHeIrUmXe8IKcIUPzg19lTnEVBdR7NuZ6WOnA2iwyGStHWImQYw6xMghRhA1ghxiBEH44BxiTiUOsWSy/K+0PiJaquoQc8wdYvLyMhc8cQFm/mGmMQmyuHMxAGDTQPVCiQ1M3KhmwrB60ZvpxR3z7wiXrNHAHHmm5++9nvfwiTs/gSeWPaFd5qyHzwJQdE8N5Aa0y13+/OX44+t/xD8W/aPCEZf54RM/xG9f/i2WdS3TLnP/ovuxvm89nlz2pHYZ/nWsE7tc1/VE1CheM9lCloS1KiBBrEFgHy5jW8cCiJcgtlYQxBzhg7AWkUlPEIsiMtloDrEoZplsxhNLEsQIoiaQIEYQhA/eIWYriPEdYuw2cojVDBt3r7ZDjI9MKhxigDk2edNrN+GdTe/g9fWva5dZ11fs8hquCKMnbgRs78FFD+LON+80LvP6utdx+gOnD1uU7g+v/AGn3HcKfvPib6paj+M6XuzVJPLc/dbdeGTJI7jx3zcqb39o8UN4dOmj3u+mdTHB0ySa5Qo5/Oq5X+HVta9qlxnIDXjXkUP5Ie1yzMhhep55Q4ZOpBrKD3nvg2pfo5l8Bnv+dk8c9qfDqlpPM0OCWIPA3lTbjNoGQEgxqMasbc14PxfgCB+ElUYmVd0FLDLJBLFMIYNMPuO/swobJ1XQCdVI6BCrJDJJHWIUmSSIGkGzTBIE4YPvEOOvVDo6yj+bHGIkiNUc1WzvMrwg5riOt5zOIcbEFEAvgmwe2IzOwU4A5qgZK7cPct/MWz+v4gm+eGzibwWngP/5+//glPtOMV6nXf3i1bj59Zvxt7f/VvW4bGAxx6C+rt+8+Bv8Y6HeidU91O09n6b9sKp7FQC98HThUxcKv5sEIxsB7vFlj+PCpy7EDx7/gXaZzQObvZ9NY2dGDtP2bCKTOrG4Et7tehcru1fi5TUvVx01blZIEGsQ2IfL+LbxAMrxwXrTm+lFb5rr+4Ib6BDrzfTidy//zvjNiOqDmCn708ZM826zjk1G7RCL+yyTYWKQ5BAjhxhB1AhyiBEE4UPnEBs1qvxzGEGsGb/IqzFhI5NA+cK/GofYos2LvJ9NooSNQ2zr0FZ86OYP4ahbj9IuY4tNZLIn04PB/CAc1zFep7HrH5PrCSg6g6IQQLYMBcccF3cuxndnfxdnPnymfj3cBAamda3uWW1c5r2e94TfTW5Bm4jmmp41AMzXhJ0Dnd7PpnUxI4duGcd1rMQu9noBqo9Mssdn2h5hhgSxBoF9uIxrGwfAP+NivZBFLdkhpuoQu2XeLfj2o982FmHy62A/M4fY2Nax3n6wdspF3SEWV4cYdYhVBgliBFETbGYqIwiiydA5xEaPLv+cTov3IYfYsFKJIMYu/G06xHQiyMLNC72fTf1MzCFmEghWbl2JofwQ1vSuMQpLeSfvE2lkbCKTvCBjWo45tUxCyeru1ZhyxRSc/sDpxnHZwEQl075a3V0UsdjjNK0HCBg7E8Q0y8jPhWld3kQMhmWYo9C0jI1DzHGdwOemN9MrvKa1ghi3H6uNTPLX2qbncEnnEvz+5d/bp6eaCBLEGgT24cKEoLyTj2Ta4mrxC2KiQ0wVmWQfOqZ+MdUHcV+u+G3L6NbRnlPOukut2TvEaJZJMxSZJIiaQA4xgiB86BxivCBm0yFGpfo1w+bLDN4FA1g4xAYtHGKdnENMIyQwJxZgFkH4iKBpluNvPPwNzPj1DLy85mXtMjaRSWHSAItxmdY1Z8Uc9GZ78cJ7L2iXAYB3Nr2Dhxc/bFzGxmUV5Izi1xO0HBPXdM+x/KW/bl1D+SEvdmnaHhO7TMsw0QzQPzddg13ec6Jbl3zdORyRyTW9ZYeY6TF+Z/Z38K1HvyX0sxFFSBBrEGSHGFB2TNUT/k0KlAQx7kNn08Am3wcp+0A0udz4+8gdYmNax2B8e1EQs45M2jipgkQuvkOsRrNMJlA8M4y8Q8w0XuoQI4cYQdQIEsQIgvBh4xCjDrG6YtP/6HOIlS7WeQGAnc8O5gaRKZSdKzYOMZ1wweKS/DZV8IKYSZR4ac1LAMS4pgyLQNp2S+nGVXAKVgIO2w9BYsqJd5+I4+86Hsu7lmuX8QQxw9g9QczCiWUaVyafKfdwaR6frUPMpnOOH5fJKMI7xHTj4o0aumXk687hjkya1vX2xrcBiPuNKEKCWIPABJP2dDvSyaKNPA6xSWVkUur/4g+gQNn+yj5c/vT6n7Dfdft5pY/sft46WWSy9HhHt5QdYg0TmSw9RvbckkNsmCFBjCBqgk0xM0EQTUbUHWLNeN5SY6rqEHP8DjHZWWPjENOJDSwuCdg7xEzLseSKSaCyiUzaOMQ6Bzu9/Wka08LOhYHbG8gNYHHnYgDl2RhVhHGIuVLSR7UeQD92wc2ki0xKX/rrXgt855zR/TUQHJnkO8R0rys+1aRbl3zdGUVkck2POdJr4xDLFrKB3W3NDAliDQITTFKJFEa3FL9Bi4NDjP9QAvwOMcAfm5QFsdMeOA1vbXwL5z9xfnk9im+m2OMd3To6vEMs7qX6jiSIUYfY8EKRSYKoCaovNwiCaG5cx4ETpUOsGZ3tNaaqDjE+Mlk6n+XFDUAtgmQLWby75d3y+jQX9rxDzOSgEgQxzboGc4NWDiqbyKRNh5jNmABgwaYFgWPiZ8/UiUq5Qs57nowOsf7ytZpuX
TaRSRaXNK3H5xDTrMu2s8zGcSc4xDTr4nuvq41M8g4x02vmnnfuwQ6/3gGXPX+ZdhnefKIb+4qtK6yE1maFBLE4kckAv/hF8V/Ii24mmKSSKYxuLQliMXCIsWx3kk1uKDnEAH+xvheZlAQ9oUhfEZlkAlokDjGVcDQSHWJRRCbJIUYOMYKoERSZJAhCxuGdo7YOMVWHGDnEaoZwHq74MiPv5H2zJJocYnKMSyWWLOtaJmzLyiFWZWTSxn2Td/LlLiuD2GDjELNxreUKOU/sMj2+JZ1LAtcljMnCIWZazkagYi4l03rYuUAqkTKuS4hMWvSDmSKTQoeYRWRS93qxjUzyYrHpNfPsymcBFLvgdNi8RnkhOQ4d43GDBLE4kckAP/1p8V8unHobV4cY++YnzfSlChxijDGtY7yfVRdRXmSydbS3D4KmLPbQuaD4v4fpEKvRLJOpZCrkHSOITFKHGAliBFEjSBAjCEKmwF1EWpfqU2RyWAk6dvORsI50B4DyxXqlDjG+Pwyw7BAzRSYHgsUnfnZJ08yC3jK2kUmd6GLhQlq+dbl3m+nxsbgkYOnqsugQMy3Hr0snBPEOsaDIZGuq+J7W7QchMmnjEKtylkmbfWDtELN8zbzbVRSydM9f3skL49Jtj60naHvNCglicSLJPR0hP7xVDjFZUKoH7JsfJogVEv7suTybpK5Un4lcgOQWYx1iXKl+IlFl+Xy1DrGoI5OyQyzqyCQ5xMxQZJIgagIJYgRByAiCGH+lwjvE0mnxTrwgxm6jWSZrRtCxmzlg2tPtGNVSfN6q7RCTS+21DrEKSvV1ywmF5ZplbONvQqm+jUNMsz1eGDRtb8kWziFm4+qK0iGmi0z22Ecm29JtxuVsHGIFp+AtV+0sk1al+rYdYpavGSZkmSK2/PtPN3Y+PkuRST/p4EWIYSPBfQ0W0oWjdIjFIDIpO8QKcH1vfP7bkIJT8Oymfdk+IRqoc4h5s0xypfrebIxRls+H6RCrkUMsslL9MK4v6hAjhxhB1AibmcoIgmguCgWFQyyZBNrbywuRQ6yuqM7DeZggNq5tHJKJojBp7BCziEyyInmGtkOsN3yHmG65sA4xx3XguI73mHlsHGI2kUnWH2ZaDyAKYtU4xHKFnFXHVlSRSfaaaEu1GddlU6rfNdTlra/qWSZ5956uVN92lslM8CyTjut4s4NqJyngBFvAEJkkh5gRcojFCV4Qq8IhxoSjWEQmZYeYKjLJFTXyB5K8kxemYOYFMXlmsoJT8PL7o1tHV+8Qi/ssk1E+LtXvpnU0e2SSHGIEERlUqk8QhIzSIZZOq0UvBpXqDytBX2bwghg7d2XiAC8SeB1iFpFJ5lJiX3pbOcSqnGWSF8R0goqcyLERS6pxIfHCoOnx8R1i1RTh8/vJtC6bHi6ryKRrGZnkHWK6WTu52SOZWBm0nFVksspS/Z5sT+Aya3rWeNfBWgdjrySIafaDMBkFOcR8kCAWJ6qJTPIOsRiV6rMDjzEyySnu8jdEm/rL0wQLgpgUmeQfa90cYnyHWK1nmYzycRVXaL+OZvymlRxiBFETKDJJEIRMXuUQa2kxC2KpVPEffxs5xGqGbWRyXNs4tCSLz4exQ8zCIca24wklug4xi1L9/my/0DNsIzjYRCYBvcAR1iGmWw8fmSy4BeU1QU+mRxTXqnB1yV3PNpFJbYeYTWRS7hDTjX0oWMzjRTrduobyQ8J1pJVYGaVDzMbVpdkeP8Okbl2O62BZ1zLvdyrV90OCWJyoIjLpzciRjGepfgs/y6TkAuAPMPI3RPxBmJVyAv4PYvZYE0igPd1emw6xekYma90hFsYh1ownliSIEURNIEGMIAiZihxiQPl2EsRqTtCxm5/5XXaIKTvEMluF+1sJYoqL/8HcoJUTS3Y9RVWqrxsXYOcQC3Ktua4rRCYBtejCu8OA6hxiPkFMMS7HdQLFtYHcgNX2ZIeYVYeYZn/yUUjdunh3GKDen47rWPW7WZfqZ4Mjk4Kry6LjTreutb1rhcQVRSb9kCAWJ6KITMatQ6z0OFKeQ8zfNcAfaOVviOSDMEOY7tkpO8RYoX5oh1jUpfpx7RCrJDJJDjGKTBJEjSBBjCAIGeUsk0EOMcAviFGpfs0QzsMVcXcmPLSmWtGSKjnEquwQk8vWVWIDH5fULQP4BTGrDjFLh5jNDIRWHWKKZTb2b/S5kFTL8f1hQHUdYjYOsd5Mr1jurliGj0uatsfWw55nq1kmdQ4xSexSLSeLZqpluga7hNeI1iE2FPzcAKKIajUzpK5DTIpMqtbFC2umdTUzJIjFiSgik9wskyaHmOM6vm80aoHLnE0KhxizUG8e2KztEOA/2HhXlAO1Q4w9duYQs77AshGOwjjEGnmWyWbs4iCHGEHUBPnLDYIgCOG4wE6NbQSxtjbxNnKI1YygLzPYhXlLqsXKIcbO/9kX2kpBjKVOkqLAxsPikmw9tr1YquVyhZzYG2XpENMJHDazTAbF8pj4xtxTuuVkh5g25mgRO5QFMdVzwwtrgHofMHFxyqgpAIrXNqrXjm1kshKHmGo5m2VsY6O2kUkWKTaty6YI3yYyyc8waVpXM0OCWJyoZpbJkA6xL9//ZUy9cipWbl0ZfpwhcB19hxjrBHNcx3tzyt8Q8T0AvCtK/maKn2ESKH8QRiocBZ1Q8R1icXWIVRKZJIcYCWIEUSPIIUYQhAzvEHPYqXE6XRa82O8yushkM36RV2NUx+6CUxY42Hl9S7LFqkOMuae2GbUNALNDjAklKrGBCRfTxk4rbrOKyOT6vvXCdYS1Q0yxnOu6gQ4xm14zeR/olqupQ0yxnCyIKeOs+UEAwKSOScZ1eU7AVAQOMalDTBmZlJZRva54oRIIdgGObR2rXRcgRSZ1gphFEb5NqT4vrJm218yQIBYnqolMqhxiBkHs1bWvYig/hHc2vRN+nCFwC7JDrDzWUS2jvOUy+WK22dQhppuNrOAUyl0FkkNsWEv1R4JDTBeZDOMQa3ZBjCKTBBEZJIgRBCETWWSSHGI1Qz52F5wCDrj+ABz+p8Phuq53YW7tECt9Ib7t6G0BBDjEUqLAxsNmnB/fNl67DGAXmfSJDRbxN0AtSgzmB4X7q5axEenYPkglUsbZNld2Fw0PzChQVYdYf7A7yieIGYSuIDFPdojpxFEbMc8mDmmzDJsAblzbOO0yQNkFOHnUZAB2pfpWkckAhxgTGU2Pb2L7xOIyFJn0QYJYnIhCEEvYleqzcj2+ZK8WOKU3ucoh1tFSLsln4/A5xHSRSU2pPnvsyUTSdx8jNsJRI3SIkUOsMsghRhA1gQQxgiBkCgVFZJJK9WOF/MV052An3t70Nl547wXknbzoELPpECt9IT519FQAlTvEZDEl7+SV58w2kUm+P0w3JsBulkmbbimfC8ngEEskEt41gUrgYH8b2zZWuwwQnUPMN5ujhdAFmGc89DrENAIjf99qZpn09Ywpltk0sAkAMH3sdADq5zhbyHouOJMgli1kxZJ7jeOOdxTq9hO7/vUEMc2EB0B5f9Isk35IEIsTUUQmLR1i
7BsU5syqFSwyWZ5l0vU+CFtTrUglUsI4TA4xU2SSHYCY6yx0qb5OOOL/3kizTOoeb5h1NBMkiBFETSBBjCAImYodYnKHGJXq1wyVQ4yRc3JGhxgvEjA3GUt6bDtmW28dum2auqVULiRV6f/GgWA3liyI6QQjNnZvOcXY5dkHq3WIJRNJo1OOLcdihzYOMZ1TKaoOMfbcsHED/sfIX7eZZhOVDRS650YWu1Rjlx1iqrGz9TPjhbLfjXN9TeyYqF2XzayktkX4vr41g4gaFEFtZkgQixsVfpsV2iGWHx6HmMucTQqHWCqR8qnVRkHM4BDjyzsBLjIZZYdYkBDEd4hFHZms1CEWRak+OcQoMhljnl7+NLa/ens8tPiheg+FqADBZaC4aCEIovlQluqTQyxW6M7DgeLFO7voTifT/g4xR3SI8QXjrHDdFJn03F+u3iFmEl0Ay8hkTzEy6YkgVcwyaTMzJBuTqX+K7fcEEsbJBeQZOXUCo1BMH1Cqz54bU2SS9UObnErJRNIbu/w889dtJuFTvl7Ujd0mDslcZMykYeNuMwm2QFl8UgpiFo7CVd2rAAAd6Q7t9gBOZCztTxvXJEUm/ZAgFjcqFcRCOsS8yGStHWIFsVTfSbhC3xk7YMiRSXYA0DrEJKs2OwCwg1lohxgTOeRvE2MSmWQH2dAOMZvZM1W/81CHGDnEYswTy57A2t61eGzpY/UeClEB5BAjCEIm8g6xZnS21xifQ8wVHWLeF9VJRYcYH5l0XeFi39R5JV/8m0SXoJ4qm8gkE3mCCvptZpkM4xBjsbygyCQT/UzCmckh1j3ULU4aoBhTtpD1HHDeuAyl+qwDziQqJZDQCkv868rUIWbtELOITDLRzHMnWjgPTe49AMbnxqZzju2XoMirjdhlI+Y1OySIxY0K7d2xdYi5oiBWgDgjJntzypHJHcbt4Bsff6CRI5OegyqbB77xDSQ6O333McJEDnYSVW2pfq0ik/3FaGhooY/NxkQOscogQSy2qEp6TfRl++zfP0TNIUGMIAiZqh1i7JyHHGI1Qz4P9znEWGRS1SEmOcQ8kYcTSmwcYjbxMEAtSjDxyXM9qcQ1iKJSNbNM2nSIeRMCtI/XjokXlWTnnWo5U2+UHHMsuAXf+RH/XLHuZ5N4aCMq8WKeKTJp2u9se0ZXl+t6kUmT0MpEs+3GbAcgQGAMcNwBxedGFoF5bGclBYJFLJvlfG5BzYyqN7x6A/761l+V22l0SBCLGxV+mxXGIea4jveGqX2HmCIyyTvE0mqH2IzxM/zr4g6SOqt2+r21wHXXITFvnu8+RphwZBLEwjjEoo5M9hcPnulXXiuuPkqhT/U7D3WIUWQyxqg6SXSs6l6FqVdMxWkPnFbrYRGWkCBGEIRMZB1iJIjVDGNk0ilHJpWzTEoOMU/kSQQIYiFK9dk2gQBHU0nAMXVemYQEwM7xY+MQCyNuBJXq+3qjDPuARTRV2+SvN0zrGsgNACjP7ml6bngxzxSZNG2PGSimjNYLmt2Zbu+1xsQu1T5l18JsP9hEJlWTNaieG9V+4CPCumVsY46y8GnjEFO9t5ZuWYozHz4TP3n6J8rtNDokiMWNYegQ498Ite8QkxxicocYi0xKDrEZ4xSCGO8Q00UmC6WDUS7vu48RWThSRQvDdIhF7RAbKD6X6aHifqpY6KskMkkOMXKIxRh2LFB1ici8s+kdDOYH8eraV2s9LMISEsQIgpCxcoil0/AhRyapVL9myMdro0MsoEOM75YyXbSHKdVPJMwOKm/Ge0NHk42QAFjOMmnRIRYm/pZAwqpU3xu7o3eIMVFQtU2hF8vgNvMJMwFink70C1uqz2YlVXaDce4wU/QwjPOQj+LKzzP/3BgdYhYCqhx5NY0paOxh1sUqh5oNEsTiRqWRyRAOMd4VVmuHmONziEHrEHNcx7MUs8gkj5VDrPRGTjiu7z5GmHDETqaqdYhFLYix/VgS/CJ3iJkeG3WIkSAWY8JEJsO4yUYsCxYATz0FZEfGtNr8sdw29koQRGPDO8ScKDrEmvG8pcbIk6DwIkloh1iFkckgF5LRQWUjPlnOzscEDpMoEZVDTFVMb1Oqb4pMst4v1TZtRRebfjflDJlyZFJRqq/sECsZKLzOMsX2WPfZ2LaxWkcaP3ab15Wpm05wiCWCI5NMfLKKTAZ0loWZeTXouWlGmvNRx5lKI5OlD5dkIinkpFVvRN4VNlyzTLaUPvtMDjG+3DHQISZ3iLGOLZcJYo7vPkaq7RBz3dpGJpngl6+B0Kf6Xdi4dJFKkcn6jYPwEUbkYseNhp3NMJMBDjsM+M//BKZOBa68Urx9/Xrgu98FPvxh4MADgRUr6jFKAf5YTg4xgiAAKQXABDHeIZZKlc+XeUaPFv+nUv2aIR+veaEhW8h6F93pZNonfvCf1y7CRyZNzihBMLJYzuhosoytMYFjUsck7XJMEGOCg5WTx7KHy6ZUX7Uuedyq5axFlxCzMBojk3yHmOG5YWLXxI6JgdsLei2EEUcFQUyzr5KJpJVDbEL7BO0y8phcuL4vDfl9FWqWSdNzozquNgEkiMWNameZTJQdYoA6NjmcDjF/ZJIriE+mhW8umNrfke4QDs4MwUngihdRXmRSFsSi7BAznVCFEZgqoCA57UILfdUIYuQQI4dYjGHvDRtBrOEdYn19QFfxOIrubuDSS8Xbr7kG+M1vgBdeAN54A3j88WEfogxFJgmCkFFGJnmHmModBgDf+Q5w1lnA5z5X/J0cYjXDJIjlCtIsk5JbRohMRu0QU0QmTZ/5pmV8EU2FkJAtZL2xTmyfqF0Xi0yaRDOr6J7KAVdhqT7bV+lkWltOb1tyb+WysolMhnQ9GcVD1ayWFjOTGl8LSb1DzLZUn3WIGV8LCgHO1LcWJu5pFFopMknEgmpnmUwWXVfsGwhVbHJ4HWIlQawkVBUSblm8SnIOsULGy4NP7JiIMa1j/OvSRCb52W3KDrEQ0ULHKe9vU9eW6TnJKb41ivAEzBMRmSBWbYdYmF4w6hAjh1iMYd1hNq6vhhfE5ONQZ6f4t26xwyQOrgkSxAiCkFGW6qfTRedrSwuwg79WA0DR+fqHPwDbliJgJIjVDPl4zV9ky5FJ2Z0jf6nNO2usHGIWsTyhY8swk6GV6GIoLGdOJcDsVmIOMW9WywhEJavIpEHE4l1B1UYYrdxtFs8N/7oK24tlKrm3iUzauMhSyVTZ5ad5XQWV6svOvEojmqp+N9N+t3FDkkOMiAfVzjKZSCGRSBiL9YfVIeaV/RdfaoWEOFb2Rs/kM55DbGK7RhDTRSadQtlBVYlDjBd8TE6qMC6q4saDt22JF5ms1CEW5SyTzXhiSQ6x2BJG5GoaQaylpRgpAoCNG/23M2LwXtY5fwmCaF60DrFJk4CXXwaeeMJuRVSqXzPk4zX/BXuuwAliSUWHmKZUPzAyGdIhpnNQWTtrLBxbLP7Wnm4vF/SrZjwsdSRPHjU5cHtMmHFcxyc8Vlyqb9iffITRKLpYiF3GMVkIVCpHms1rAfC/JkPvq6Sdg0rnKgxbqm8T92Q
ilmpcgrCbDBYPbcRmcogR8aDaWSaTxYsgFpvkv7lgDK9DrGTHBRPEXG2pPvv2ZGLHRCH26a3LolTfi0wWQnSI8YKPSTgKUzwv37dKfJHJKKOgxRUaNk4dYiSIxZdQHWKlY0/Dlrczwau1teikAIANG8q3y8epGLyXySFGEISMIIjxDjGg6ALbaSe7FZFDrGYYI5MON8tkqsXnZKq0VN8mwqgUeSxcSCZxzeQiY26fsa1jjZ1enkNs9BTtupSxPM3Y+VJ909jb0+0AAkQQk0MsZKm+TTE97wQ0iZVG15NFp5dq9lKbdRknazAJrRU6xExjSifT2vL9WsRLqVSfiAfVzjJZyoAzh5UyMpkfPkHMm2WyNC7ZIcaX6jM32+iW0cEOMb5w1RSZDOsQM0UL6+oQq7JDzBQFVf3Owx4bc5w044klRSZjC3WIcfAOMRYZ4gWxGDrE5OgMQRCEEJnkHWJhoVL9mmGMTIZ0iIUt1bcRsfgidZ+TxzYyKTmHVGNibp8xrWOMnV7semxC2wTtMnJEU7Wc0gFnENesI4wah1jFpfqGMYWOs1o44JRjt41M2nS3KV5XFZfqlwQxU+dcxdHfCp2OVKpPxIsqZ5n0HGKmyGRh+COTLbwgpnGIDeYHAQAdLR2BHWLyzGTlWSaLfws1yyQvcJgik2E7xKJ0iEXVIVbNLJPs29kYXEQPO+QQiy2qE+ygZUkQKxGD9zI5xAiCkNHOMhmWODnEMhng+9+3j3vGHPkz1+gQM3SIVVOqbxRdEiFFngpnomTjbEu3GTu92LqYY8tGVFItF2VkUjkLo0F0Me0HK/eeTWTSUoCzihRa7ivf2IPEvIB4aVBkkj1mdq1uu69sXseVOh0pMknEiwhmmQTKkcl6O8S8yKQniLlKh1i2kMVQfghAcZZJdpAQ1sW98YXeGafgj0xW2iFW6SyTwx2ZtHWIsXEFzTJpEweVXWbNBAlisYU6xDhUgtj69f7bGTFwTZAgRhCEjLZDLCxxEsSefRa4+mrgpz+t90giwXaWyXQy7RMH+M9g3iHGCzNWpfpBkckQzhqTkBC2KN40S6Gx2Fx6fKrlQpfqW8wyaYoBquKlNu4961kfDbNa2mwvlUjpI4WqWS0jiEwGva6CIpM2+8pGrLSdgMAr1TfNEkql+kSsiGCWSSB+DjE25XIh6Z8Rk41jMFdyiKU7AjvE5JiNLzJZaYeY7IKqpkMs0lkmxVJ9+ztGEJnUFfM3ExSZjC3sWECCGEa8Q6xhu90IggiFcI7Hrs8qEcTiVKo/VPzSF4OD9R1HRAR2iHGRSV+HmCM6xGJRqm8TAwxw8pgik6EEI0Mc0laACyWCGPrW+H1lM7sne3wFt2Ce9dFiVktbAccmwmgTmbSaEMAiXhrkELN6HdvEWS2jv1SqH0zdBbE1a9bgS1/6ErbZZht0dHRgv/32w6uvvlrvYdWPCGaZBOLkECs+jnRJqHOgmWWSi0y2p9uRTqY9W7G3Lp1DTOgQK/6tIodYKlU+earUIdZatjnX0iEGWD62KCOT5BArQg6xWOFFJi1mKGTvo4adzTBIEKNSfYIgRgDGUv0wxMkhpuqmHcH4OsS4i/VcQYxMeuKA6/+8dlFFqb5lL5YsOFg7a0KIWLzoUqm4pirM14kgfE+VTR+Zdam+zmVlcHXxy/FxT9MsjDqBSvncWM76aIowGl2FIcXKMA4xq+0FRSYtxEObdbHXgmn20mYt1a/g0yU6urq6cNhhh+Hoo4/Go48+iilTpmDJkiWYOHFiPYdVX6KaZTI2DjEpMik7xNJlhxh7c3a0FKctHt0y2otR8usCpJMlp8AJRlU4xNLpygUxfna3bDZ4+ZAUSvtGEMTgBiv5OncXRSbDQYJYbKk0Mum6buNZw20dYolE8TUdg/ey3AdJEATRkJFJdt7VIC5z+Ysl/nrC5xBLmR1itS7Vj6qMvOAW4LiOIBqoRJcoepxaUi0YzA9qY3JBkUm5VN/kjDLNWKkUgkKU3LP78GMyzjJp+dz4IoU5s0AVxkEV+LoKcIgFlepXKuz6+tbCTg4hddPx/WvNXqpfV0Hssssuw4wZM3DLLbd4f9tll13qOKIYEFWHGBPEYuMQK76ZtbNMFjLeQaMjXRTExrSOQedgp7cu2RXG/73cIVb8WyiHGC+Iyfs/bGSSd4jVcJbJ4updBDpbdR1isqhDkUkzFJmMLUGzTOYKObhw0ZpqFZZxXMc7XjYM/LFsu+2KP6sEsba2YnwnBheJ5BAjCEIm8lL9OJy3NLhDjL9YzxayaoeYwtEdxiFmE20LW6pvErF0xe3877auIKvSeV58ChKoQpbqVzvrY1DsUH583ro4HTt0ZNLkblPsK1OE0dhNF8K9Z9pXtqX68vZcuD6h1cq9F9IhJouVbVC8jikyOfw88MADOPTQQ/HZz34WU6dOxUEHHYSbbrpJu3wmk0FPT4/wr+GIqkOMRSbr4RDjxu6wWSaTig6xhOgQ4yOTAHwzTRojk17HFivVd8v3CdqXUUYm+W8wazjLJAC4NhePth1iNmIfzTJZpEFOZhsFk0PMdV0ccuMhmPmHmcg7eWGZhuwRs3WIyQJ5HREEMacQizERBFFfGtoh1iDnEIGRSVWHGJtlUuMQMzmH2LKAffeStofL1lkjleqrxmVT4s8vZyzodxVjH6ZSfavYoenxRRSZ5J8bm8dnI+aZHp9q7IGl+gFxVttSfd49pxPXTFFcVbzUpk9OtS4q1a8jy5Ytw3XXXYc99tgDs2fPxllnnYWzzz4bt912m3L5WbNmYfz48d6/GTNmDPOIh4GoOsTq5RBbvhyYPh2YNQsAF5lkglgCKDA3F1eqn3W4WSZZZFIq1tdFJh3XKTuomEOMRSbffBOYMQNYulQ/ZtvIZFiHWK07xHbaCbj+evMdqUMsEvJw8Or00jfVDXIy2yh43zgrCtkzhQze3Pgmlm5Ziu6hbtF10Ig9YipBrLOzfByQj1MxeC8LX25s3AB89rN1HA1BEHGATSQENFCHWKNFJqXPXF+pfumCWzXLpOwQC1uqbyWUcM6aijvENFEz1fZMsUN+uVAxQNX2LDrS+OWsS/WDtpcI6BArLRd61kcbgdHSTacT8wSh1SBWVi3AVViqrxp7pTNymsYuCHCGbrpmpK6P2nEcHHzwwfjVr36Fgw46CF//+tfxta99DddrLvIvuOACdHd3e/9Wr149zCMeBqLqEDOV6tfSIfbCC8D69cAjjwDgIpOlN2EhCRQKJUFM4xDjI5M8WoeYUy7VT5X+7EUmV60E1qwBXnxRP2beISYLkpV0iHkDjjAyqXKIrV8HPPxwwB0L4rgqEcTIIYarPgS8/+vAHw9Gw5zMNgqmWSZlR1hTOcS22aYo8LsusGmTeHtMHGK8MwAAHLjAv/9dxxERBBEHInOIxWmWySaKTOYKOe8ztiUV0CFmiEzqZimsenY+RdTM2smjc1AFFalbRBgrdSFVW6pvU+JvmolSHlcYMc/K1RU0mYHN9kyRSZuSe5s4q6VDTO53C1pXtaX6vDDIxqWbzIAik3Vg2rRp2HfffYW/7b
PPPli1apVy+ba2NowbN07413BwH96u6+Iv8/+C5V3LA+/GXsg+h5gqMllLh1h/aXulD3wW6/MEMckhJswymSsJYi0aQYx3iElOj/Isk1KpPvvANp2AqBxilXaIDWdkEgg+sdIV4oeJTFKHGFZMKP6/ejwa5mS2UTBFJvmTh5yTay5BLJUCpkwp/s5ik7IgVueLRPmCykmg7mMiCKL+NOQskw3mEDMKYlKpviwO8J+/ulJ9eTl+m0zkyTk5rWgWulQ/QHTRxi8tI5NWHWIWLiQbFxm/HBNd2GRCqmVsHWImUUkluphcSDonoMoZZdqeUfi0jEyGLdXXPj7LUn2byKSVsBvydWwUIikyWT8OO+wwLFq0SPjb4sWLsdNOO9VpRDGAcyg9u/JZfOm+L+Fjd3xMGQfi8SKT9XaIDQwU/y994HuCGN8hpirVV3SIMVGPwSvhvsikJBh5DjGdI4rH1CHGix+2HWI1OAFTzjKZCBgTYD/LZJh+tDicWA4nrotcqXvdSoQkIufm127GtS9dq7xNFcGQb2M/N5UgBvh7xPhSfaDu72USxAiCUCF86RlFh1gcvshrcIeY8AVUoRyZbEkpOsQsSvUBvVjC1ievix+XsYcrpLMmrOhiEkE8Ma9Sh5iFUMIvJ0wIoFuXhSPN5CLTjUsnaJqK4m2W4cdl60gLE42tdAKCsKX6yUTSiyjqHGI2Djhh7Lb7yuAEbEbqKoh997vfxYsvvohf/epXWLp0Ke68807ceOON+OY3v1nPYdUXTkxZsXUFAGDJliX4x6J/GO/GF9UD4RxiVjMx2uJziJUy5eniG9BNAPnSGzqV5CKThUy5Q6wUmZzQPkFYNT/OwMgk275NialqlslKS/VV64iAyBxi1XSINWtk0nWRYzppAg3z7e5IoeAUcNbDZ+Gcx85B91C38nbALjIpuA4CvmQYkegEsfXrxdtjEpmUL6gKSWiPL72ZXryw+oVoP68IgoglwpeejeYQaxBBTBaibB1i8nFfV6ovr5MtC1gWtxsEKusy8rAxwDCzPlboELPpSOOXs9mftiKPTal+VJFJU5m8dl0mMa8GkclqS/VNIqPNbKn869hmQgdBGCSHmEBdBbH3v//9uO+++3DXXXfhfe97H37xi1/gN7/5DU4++eR6Dqu+cJG9rqEu789X/OsK44WA7BBjccO+bJ9vWTkmqXrjV4zkEHO8yCRfSFk8GPkcYlJk8lsf+Ba+fvDXcdI+JwGQHGKu6BArRyaLf/NFJm3FrEojk+xCU1XMHwFVO8SCOsR0j811qVSfd4hRqf6ww59YMxcpjykySQ4xySEWs1J9pUNMc0w757Fz8OE/fRhPLn9yGEZGEEQ9ERxi7PqsUWaZbJAv1XyRSUfsEBMcYtzFuvxllFyqz5eyax1ipqiZRedVJVEzXXzPJirIb9PGhWTs9FIUt9vMaqkaeyhHmmWpvq2bziYyyQuMumisSYBTikqGfWUljkZQqh8qqmojjgY50kJ0xVGpfp04/vjj8eabb2JoaAgLFizA1772tXoPqb5w7qKuwbIg9uJ7L+L51c9r7yY7xMa2jQUA9GZ7fcvKMclIY5OaDjH+w4ttT3aIyZHJfafsixs+eQN2Gr9TaV0ahxjfIeY5xEJ0iJkik2EdYi0tNSlxrXmHmO6x8X9v1g4xziHmAg1zMjtSCBKxjB1i0jTwTS+IxdwhZopMruoudouu7m7AyXQIghAQHGLJ0mdvJQ6xkVSqf9ddwGc+A8yeHY/xBmDbISbPMik7y1xXjEwaxZKwDjFdPIz7gt06lldlZNKqQ0wxdmPZuinCWFounUyXZ300ubEsRBebUn1Tf5aNm04lmqnWFVY8tHHm8YKYT4AL6RAzuve4dWn3lc1rz2JMgDqGSqX6InUXxAgJ7tusLYNbAJT7t25+/Wbt3WSH2Li24oQDPZke37KyQyzSYn1fh1jpgNzit+ymk2nvm4tsIeuLTDKYfVOOSTIc1yk//ko6xFQOMdX9bEr1axaZ1DjEKo1MyvfTPTZe/CGHWPmCvdn2QR2Re0lk2Mk1Owb0ZHrw77X/huu6zeEQmzsX+PnPi+/VsIJYHB1imjGxz41IHc0EQcQSXy9UAo3vEPv1r4H77gOOPRY4+mggK0W7HngA+OQnix2Qv/tdbcdqgez00s4ymRQ7xFQOMTmuFeTG4jvETE4XbRE+93owiTw2Bfa2pfqyYyvIkaYbu3JMATMLBu1Pmxk5g2KHyu4vmzikRUda4HIh4qWmsRuF1hCdXral+jb73TRDpo1rzbcuKtVXQoJY3FBEJg/c7kAAMM426XOItZYcYhmFQ0wWxGrhEJNL9RUZ9lSCc4gpIpMMplbrIpN8h1jVDjH55KmSDrGaRCY1DrFal+rz+62ZBTH2tmSfE00Sm3xo8UP47P991hPn60FYh9jpD5yOQ286FC+vednfISbNTtsQnHcecNFFRWFshJXqKy96AwSxhhEyCYLQIh8bCklU1yEWB2d7kENsaKj88zPPAG+/Xf79hReAE04AHnqoKJQ9+2ztxmlJoEOMi0yGcYgBekGMFxzY9Y62uN1Q7q4SG4xOHssYYJjIZMEtmGOAIQSjIBdSkFPOFCm06bLSjcvUixXkAuTHrRpXmH1l20fGb8/mdaV9fCFK9UOJhwYBzsYtSKX6ekgQixt8ZLIkiL1v6vsAAGt612jvJjvEWGSyP9fv+zbGF5mshUNMKtVPp8sHmSwXmWQHRD4yqXOIhYpMlm4bNoeYqkNsOGaZrHVkkhxifocY0PixyRdeAD71Kfzm6Vn4+zt/x+PvPl63ofAnE6oPelkQW9m9EgCwumd1czjEektfevT0+AWxiRPLtwGxj0wWDB1inkNMcTJLEERjUXAUx4ZGd4jlpGPbpk3ln9nEKIwYPB6TIJYtZIVSff6iXyVO8Rf2gEEQC1PcbjGToW3vl01Pla3owruQTOOycT0ZO8QUM3daOYwMQomxb81iXFaurggdYrxAFSYyadyeQTy0LdXnhUHdcsqeMZsZOalUvyJIEIsbisjk+6aUBLGeNdpifdkhxiKTgL9Yf3gdYkwQ0zjEuFJ9FplkHWIMpUNMjkxKDqpEadFQDrEgQay4QvU6eOEpTrNM2jrEdI+LXz/7djYO37QOJ3KHGND4DrFbbgEefBBDG9cCgPfe7M30DrtbjD8BUJ1YsGMBe4+wZeQT75zToB1i7CIqm/ULYkz4YtGbuJfqG2aZJIcYQTQPkTvEYiAgBSYO2PG7o/SlMC+IyWJZDB6PSRBj5wyAwiEmRyZdsVQfCHaImRw4YUr1bcvWw0bbbGd9tJqF0UYICugjs3HcBYpYBoFRN65KerGECRaSKU8kNYqjITq9lLNMKsRK4+sqyHFnWXJv9TwnEmhNakQsi/UIy1nsKyrVJ+IB9+HNSvVnTp0JoDjDWnemW3k32SHWlmrz3oxysf5wOsQcKGaZLL0J+VL9/ly/d9DwRSaDHGJcZDLlsIMDSvexEMTYRWJQZJL/u24dNZ5lMsVt3mqWSfkCOGxkkv8mk52MxuBEbFhROcR0rydTv
9iCBcAllwBvvhn5ECOn9LznSxcl7D37/pvejz1/u6dwslvzofCiluKElXeIua7rLcNHNtjtTSOIsfcqL4i5buwdYgB3zJagDjGCaB58glilDrE4luoD6nMIdnyePr34Py+Iya6yGHwxKT9H/OftQG7A+9nXISZHJhEiMhm2aNxCuLARsWwjfia3Gfu8Y9c9ynHZOOBsxTwLYdAmDqlaj6mHK2zJvW5/MmEmaOy2/W42s3uy58+4rggcYsrXqEnssoi82jjSTPuKSvWJeKHoEJs+djomthejL2t6/LFJ1y3bjZlDLJFIaHvE6uEQS6ZbPNeWyiHGu9h8kcmADjHHdfyRSc8hphG2eGwjk6b1qNZR68gkEN4hFjYyye6fTBYFQ34dzYKqQ0wVeejtBXbfHZgwATj8cOCf/xRv//GPi//23x/46EeBvj7/OuJC6fWQ41xXrutiUecidA52onOgc9iGIotaMvzfHNfxThhUAlhDCmLstZjN+iPSvCDGHyti6hDT/Q2A8LwSBNHYyC6ihnOIqc4hZEFs40b/bYwYPB6TQ0wQxCwcYral+mFcSMbZDkOKWNal+jY9VXwMsAKHWJSl+jal7DZ9V8K6bEv1LfYnoJ8FNGy/m42DyhShVY1d100XplQ/cEZOGwGuglJ97XuLIpNELGBuqELBiyZNbJ+I7cdtDwBY27vWdxdeHGIOMaAcm6zUITaUH8K7W97VXqAo0cwymUilvBkgPUGMc4jx+CKTkkOMt1cDxcfPPmDTpV3h6xCzLdWvVBDjnRm1iEyWnHYpeZW2DrFKI5Mq91wMvpkcVjiHmDEy+fbbwLJlxb6m558vzvzHw3qcAODpp4F//7sWo42G0ushj5JDrJATTg6G06UT1CHGH/940UuOTPpK9Z0AMXmkwI49mYw5MslfUMWlVF/xHDhQj4k6xAiieYh8lsk4nLfwYwjrEJMFsRg8HltBLJ1MCxfrJodYUIeYsvvL4HSxEpWqjEyqBCqTCMLHACtyiNnG8kJEJm0jjLal+jpRKZQjrfTetYq9hhDzTJFJ00QFNSvVt4gwBomCVKpfPSSIxY3SAaC/MOi9iSZ2TMT0scUPSFWxPn9RwRxiQLlYvyfTIywf5BDrHOjE0bcdjbGzxmL33+6OWc/Nsh8/c4ixUn1wgpjnEMt5Y2UOMUZbqs2nTssOMVe6YFJGJit1iMknT9VEJiN1iCk6xIJK9V03ulkm+X0Tg28mhxXOIWaMTMonrOvWmW+Pcw9Z6TnOlYRYOX44nKJEUIcY/7eCWxAik3LcsiEdYrYdYrwjIcaRyUKAINYwzxtBEFoii0zG6byFH4ONQyzmHWLyFxq80MAmyQKK5/rWDrFaRSYtyt2DIpOhSu4NgpFJhFMJdVY9XEGRSQsRJLD3K0Spvs3snjYuQMDSLWghUFmP3WKyBm3fWhWl+lbCrinyyj1/vtlLqVQ/EBLE4kZJTOnKFUWslmQLRreMxvZjiw4xVWRS5xDTRSbl/h9ZIJuzYg7mrJjjvYnf3Bii80jnEEumlQ4xvsAQ8PeHAeVvjNi65A9TZWSydJsnnpnEBzbVdVubX8yS7xcUmaxZqX4FkUl++0EdYmEcYjE4ERtWXBdZ5hAzRSbZCStz32zYoL6dW29skSKTKofYUH4In7n7M7jp3zfVdCimDjHXdX0iFx+tk8W0agSxrUNbcdifDsNvX/ptqPvVHFtBjH/9xTkyGeQQow4xgmh4fLNMNlpkUnXuxs4rRqhDjD82M4dYS7LFJ16pHGI1L9WvNEoXtuTeIFDZzBpYz1L9MAKjC1d5LeatK0wvlkFUAvSRSRuXXyXPc1BkMvJS/YD4pa2ri4/iVjKZAZXqE/GidADYki0KYhM7JiKRSHiCmDIyyR2U+Bcyc4jpIpPMnSUfIGWBLFSBto1DzOE6xKTIpNwfBnCRydK65A/hglt2iKULkkOMLWQ6edi6tfj/xImVzzLJRyZrUaoPhSAWVKqf4Z5HOSJlK/Sp4qRxOLEcTmxL9dlrYIcdiv/395ffD0AsS3G1eJHJ4hjzTt7nEHt5zcu4b+F9uPrFq72/v9fznvehOm/9PMx6blZFHYVbBrfg2ZXPKgUvHvlYEBSZlH/vy/bh7rfu9rloVfxr9b/wr9X/wp/m/Sn045EZyg9h9tLZlU1O4LrA0qXl9yF7XYWJTMqO0TrBn9DKf5MhhxhBNA8+h9huuwD77ht+RXE6b4myVD8Gj0c+VvOf9Z4gVrqQ58UB1cV6RaX61Th5VAJIlb1YskvHuK4QpexRlurrIqjW5e6c6FKJY8smounrEAsTmbQQD20nBKiq0ytMqX4VkxmoRDPluix64KhUn4gXJfGnK18UsViZvjEy6aojk16HmKZUn90uX7DKbzjri7ZCoSzCGDvESpHJZEo4uAL+/jCg/OZkb1Zft4TreH/zzTIZNCsgUBbExo8PjkzqLiCzpX3W0hKfUv3OUvF5Og2MKz7XoR8Xc/yNGhWvLo7hhC/VT5b2gelkduLE8rTpvEtsJDnEvMhkeZZJlUMMKB8/7nrzLsz49Qz85sXfAAB+9OSP8KOnfoR/vlucXCBbyGIwV45RmDjzoTNx5K1H4vnVzytL9Z9Z8QzW9a7zXzQ55cikSgDjvzwouAVc98p1+Pw9n8c1L16jHMcTy57A7KWzvfHz/1fDb1/6LY79y7H43cu/C3/nP/wB2GMP4KaSMy+sQ6xGse5KUM3sRB1iBEHk5WP7U08CY8aEX1FcHWKVRiZj9HhsOsTYOb7QISZHJlFZqb6VQ8yieN+2mN6mQ8wmMmntEKtyhkxVqb6N2GUjpiiXCyPmGSKT8hdlodx0hhJ/28hkkIhq2+llI7QaJ36wif4qyvlV61LFL6lUX4QEsbjhRSaLItakjkkAYC7Vd8yRSV+HWF4SxAoRCWID5RJNFAqA63oXN8lUWukQSyQSQo+YKjIpl+r7Zh9yeIdY6T6yQ8xGEJswwX+haOsQ21KcAAGTJtW2VJ/bfKBDjAli22xTniEyrPOtu7v4/7hxsToRG1b4Un32+jCdzLa0ANtuW/zZJIjFWVhkkUnWIVbwd4jxXV0AsGDzAuH/7kzxtdOT6YHrunj/Te/HXr/by0pQeq/nPe9/uVT/zQ1v4qjbjsIp953i+9DnY5K+DjHH7xjb0F98ftj/PLlCDif89QSc8NcTMJgbDBRkejO9eGP9G4GPDSh/saE6ngPAvQvuxf0L71ffeenS4v+LFxffi2EFMT7WXe9SfdaNyJ3IqTrECk7BO6EjhxhBND6+87wwkzvxxOmLPFNksnTODKAsiG3dWj5us/9j0v8I+L+cVkYmJYeYC1cppMhxraCImK1AFaYo3nEdn8gXtvMqSJiRt2nTIWZaxkqAsy3VD3IOJUSHWCW9bKFilUHiqIVDrJIJAQLXFUGpvjI+a5qR00asND03VKofCAlicYNFJnPlyCSAcodYCIeY1yEmRyYtHWJMqNLNQumDF8QAwHG4yGTaX6pfEu/42KQyMimV6ssfWGKHmMYhZhOZ5AWxsE4qXnyqRWSy9Jj5WSat
HWKTJwcLfbqxspkRx4+Pjatk2OFL9VOlH0wOsdZWO0EszvtRikyqHGKe8FR6P8sOKl4wK7gFzN8wH6t7VqNzoPi67BrsEi56+Pc1v265B4wdA9f0rlEKYmEik94YFSLXUH4IA7kBZAoZDOQGfAIgAGwe2Oz9fOr9p+LAGw7EvPXzfOsCgEWbF3kXCfJ2f//y7/Hhmz+MrsEuDOWH8Pm/fx6f//vnkS1ksb5vPfb63V64bO5lxRWx100uJ74OTYIYAAwOlm+LyUWirUOsXjOcEgRRH3zuX9dwrmMiJuI/P4ZMCrh90d+wrpebeIc/P5g6tXy+tXmzeHtM+h8Bv5NH5RBjx3b+Yl2+5hAcYgGRSVUZeUXOGoXTTLmuiCKT/GRgRodRGOEiwlJ9m9kOE0jYz5A5HJFJGyHSMjJpNSFAhRMsVBqZVE4IYBD8UsmUt89M+7Q1aS7Vpw4xIh4ERCbX9633vbnYRWUCCcHqqIpMuq5r7RBjt1s7xPi+JADIl2e6SKTSSJbUKd4hBiC8Q0xxosT2QUrXIWYSjpgLqhpBjJ20qMSnCOAdYt5jC5plkhfpKp09kxxikkOstA8a3SEmRSZVHWKyQCT/zgtjwn2dHFZ3r8a0q6bhC/d8AQAwe+lsTLh0Au5+627fuuRSfV5M8rsIxMikHLfUFfCrRBb+b9lCVijrB4Db37gdU66Ygj+9XuwUW7F1hfB/b6bXO/a+ueFN7P37vXHKfaeI+6z0/5/m/QkvvPcC/rX6X0XxzckhU8hgKD+EF997EYs7F+PvC/5eHAx73eRy4uvQ1CEGlI/PNYp1VwI7meMvllQdYvwJLDnECKLxkR1h8rHemjidt5SO3X/fFzj1+XPxk6d/Ur6NPz9oayuetwHl2KQ8aU8Mzh/k47dKaGC38aKTfE3hupWV6oeJkdkIT8p1hSy5D9qeb10RiTyVOsSsnEpSlC6oj8y2VN/G+QVYlupbiIdse0FOQJveuTCvBX7SCNV+iCoyCSCUaGvaV80ICWJxg0Um830AypHJqaOnIpVIwXEdbOzfKNzF68/i4pJAuVS/J1uOTOadvPcmC3KIhRbEZIdYocA5xJJcZFLvEDN1iGlL9R1FqX7pttAOMf7kSXUCpTupYoKYSnyKABYjSjn8/gjYBj+mSoU+Joip+tWaBd4hlrRwiOkEMSZexOkEXYfsEFO4rXRCmO932V1WyGFR5yJkChnM3zAfAPDsymfRm+3FnBVzAOjFtLyTF2YblE8gcoXyDFYqMY0X0/m+MTa+ZV3LcNebd8FxHZ+IJzvf2NjZ//zjLzgFvO+692HmH2ai4BSwrGuZt37VvuLddVrhkf2dd4jxF1FBDjEmiKXTsXkvs2M5/82mk/C/L/gTZnKIEUTjE5lDLCbiPz+GTaOLvwpRfXnSkylTij8zQax0/uC0tuC5HYGnxnViSeeSWo/YCBMpTc4bdhsvOslfwruosFQ/RPm5dplKZuerIObIO8Ss3V8hHGJBMxlaRSYtZx+0ct0lonHvAZal+iFmAFWtSyWuReUQU67LouQ+TL+bLFbaRCupVF+EBLG4wSKTkkMslUxh2thpAIA1PWJs0nNHJSRBjEUmOYcY/0E03A4xnyBWGi87UAMBs0xqOsSUkckwDjFdZFJ1sRgUmeQdYrWITLqS2GfrEKs2MkkOseKPNqX6QQ6xGH3Dq0XuEFNFJoOEME7I4T94s4WsIGoF/S/3gPEONPkkgz9WqRxhQaLeNx/5Jr547xfx3Mrn/A4xw+OTH39/rh+ruldhdc9q9GX79PdR7DtdNNU7kWWvG3nmSJUgli6fkMXZIZZMJJFkgpgiMkkOMYJoLlSTJ1VEnM5b2Odq6fArnFtzx/JnVs/FTz/Qh9sPAFavWSDcfv/ueXzkq8B/7v869vzdnrht3m3DMnQVqsi7jK1DzFeqn7Qv1TfOzhfQi8X3cKnWFdaFFCSmeOMK4xCzcaSpIpMWIkiYfSWLlVWJeabHZ9shZuEWVD1/qnUxbMQ1204vq5J723hpSIeYsVQ/aJICcogRsYBFJgtFhxjrEAPKsUm5iDnIIcZ3iPFusCCHGLt/NA6xNFJeZFJyiAVFJiWHmDIyycqZWal+6TYnjEOM78nSCWI2DrFaRibDOMR4ka7ayGQTd4i5joOCN8tkBKX6MeoA0eIW3235hKFUP4RDTHb4yM4ok9hk6v2SjwX8scqqQ0wS4Db1F7+N3zSwSRizIOLpOtO4scv3Ve0T5f+K+8rrDxWZTCTKAiwviMXkIpF9uZFMJJEsjUlVqi+8fmiWSYKomi/e80UcccsRlUcRa0zkkck4fAHFBLHSqbow6zI3C/D//uNU/GLnVTj1ROCgFecXj3ml29+dKB4f39r4Vs2HrUMVeZdhF9/FY3zx/MkniHEOMbZMqCJ1G8eWQXQRupeqjTAG9EF54woRA7Qqplc4p3nRpSqHmM6FVMW+4md9NAldQPAECzbuNn4fyMvJYqVOZFSJSibhyaqbzhCZtHodS71fgbN7cvtK+94ihxgRC6TIJHOIAfpifZ1DTNUhxtxgyUQSo1pGCX9jeIJYa0hBTOEQU80y6cVkEpaRSckhZirVTxVKByPmEGPva1uHGH+haOsQGxwsi4Eq8SkChA6x0t8CHWJRRCb5Uv2YXEQPNznuQ8MJ6xBbv95/e4xmidLiup4ICKidS+w4UXALcF1XKe6w/+XIZGDvmCFuqXOPAZIg5vgL+YNK9YUyf50AaBCzvO1KcctAV5nuvtVGJoHy640do2JYql90iCWFv/GQQ4wgouXut+/G3FVzlRM1xYFGLtXPlk7VlQ6xlhZhspZOtx8ru1d6tw+1ipdtNrM21wpPEEsZBDFOLGMX60aHmGWpPu/AMcXybPqg+MdgFZMziC5B2/PWZREDDCNuBJbqB4ggttE9QB+RtZn9UnBGldbjuI5ygiUvomkTVbWIX/LXyPxyujhrJfHZsJFJq3XZuPcsI5M2XXFUqk/EA8khxjrEAGDamGJkUpiZBgaHWEnQ6smUO8SYG6wt1VaeRVJyiLE3SdUdYvlyX1kiXXaIMZQOMUVkkr05PYeYIjLpWbflDjFvIc2FXz4P9JYEQ5vIpOqkijmx0ulitLAWkUmhQ6w0FCB8ZJKNSb5fUIcYH5mMs5BTA3hBLJLI5EhwiDmOF+sAzKX6gCjkKDvEdMsqCvjl/00ilnySMZgfFJaV45b8saPgFnzbF7q8pMik1s2mWIcs4mndZEH3dfxCnOAQkwUx5hjjo5Ls9RbzyGSKCWIKh5j8+iEIonIKTsF778XVcdnIpfqmyKTbkvacY9uUTqmXdS3TCmL1PB56yQxDZJK/jV2sq2aZrKhU32JWRBtxgx+bSUiwEemsI5NBsTXLyGTYUn2tQBVFT1WI6Kiv00vh2Aoq1Q/1WkgktO4on3uvmskTNAKcbj8Yo7gh+t3ClOrbRF6bERLE4gbrECuUOsS4yOSY1jEAytMZM7QdYqrIZMkN1pZu85xZQR1imXxGOJhrkR1ihXL4JZFMeQ4xBjug8A4xZYdYQKm+sE55lskgh1h
bUFo/VRToUweo6PwBbSJ7JQNzo4iu2T2zGnPofve4xk4dPuAuD7+NBJH8Kqhavw+sNfDwC46Q034VeP/Qq3rb0NT+54Es1WM7AWNBvcYtBoNjBYHcRbjnozFh98LE57PfDAwg0kEY2VnI0FtAlGFqD9+PbH8bnbP0dus+bX4LWAr/4I2DkA/ODAaU47PWev50Smvewll+Fr93wNT489jcMXHc4tPmJVWx6++z3g2W+r4ZHhKSMbZ6RaAYf7pauBQy6uYXN1CocuiosSp29dhBPXAL/YL/hdtY28VgtXfxt4x4uBe/YIwvlV1/iPnvxRjAyM4Mo/X4kNOzcol3f41AL89H+AQzYAA6+tAdU63nrMW3l+EwDgwY1Ythn45lXAGecBtepAxKIu1sdP/TgXxFSW0CWjS3DFWVfgXde9C8+MP8P3Sbk+ddqn8LFffAw/X/1zDA8MK9s6bslx2PYP2xLpMCA4lv7mWX+DLRNb8LHnf0wpwgLtkPhXHvRKfOXMrxhZJj940gfx/577//gxOVgdxGdf9Fm8/vDX89FmVfX+570fD21+CN/647fwnL2fo5zm/MPOR7PVxJuufhNec8hr+Hpf/7rr+TTveu678K7nvkvbx2XzluGXf/1LbN21FU/ueFIpKgHA4XscjitfcSXOvepcnLbsNADBdfz4fY7Xtu95Hr77yu/y3/cZ3Qf3vS1OP8nlez7ec/x7cO4h58aoMlYvWP4CrHvnOjy8+WGMT40r92MgsE1+6cwv4SMnfyTqdhAEsXcc+w7cuPpGvOLAV+CvDvorzKnPwZ3r78Rbf/pWPLrlUUw0JjCrNgvDA8PwEFgs9527L47c40icsf8ZgPcevPdXwHs//Q0886yV+PDNH8YXfvuFyAs4tl5XnHUFxqfG8ZMHfoI3HvlGvPPYd2LfufviP37zH/jATR/A3nP2xrXnXxuJCNk5uROf+OUn8JMHf4KzV52NC4+4EEtGl+COdXfgTT95E/7wVPAyYeVG4As/AZ6/31JwQYxCiMklEGIVv4Jj9jqG/+msVWfhwyd9GNc/cj0WzlqoPD+yEsXRVquF79/3fdz9xN2YU5+DY/Y6JrJ93nHsO3DVfVdhy64t8D0fZ686G/vO3Zf//bxDz8O9T98bjU5JWw0KIaawVaZaClMIqtScKs/TBsCL81Fsa2n5ZyRCjCAYUW2HYt/lPsi/G4fcU8U83baiZIhpxEpbu2eaYCS3z38nZJZRrb8keynBPqs8tixJx36pUhArWnm0UP1cK8UyyQ6anc0gn+Xv1+2JZw5/PT5/x+fbI92oQvU9D1f+L7D1pOdgwUcJb+y5INa+MW0KmyR2UUwaZVIgxFgW8Z/f+mdu9Tpx3xNx39vuw/pt69FCC88+9hWY8/Q2jNd9/GlBE2f/3UKsn9yIqUqQISUv2/M8fPWsr+Kioy/C4YsOx9CNt+Ddl1yFX568DNOfvpS/NRiqDWGvkb0wpz4Hnudhv7n7BW/Qv7Ue+NK9eOxlJ+Lez76PB0BX/WpAl61bh72POgXVoWA7tEeZFE544X8/+8LP4uWrXs7tOr/5299g7da1eHLHkxj+9Oew/PLvYdZ0C2gBQ4etxD/V7uLzykKs7/m44FkX4A2HvwE7JndgbPX92P3AY+DVg4eJD57UppvEMF7xwsHywsTtdfaBZ+PsA89GUv1/z///+P+PXHwk1rxjTeK0AGKh+mz5e43shTveeAd2Te/iF+Kh6hB/G/6Kg16BVit4CzjdnMZ0cxr3PHkPTvjqCbwNNSFW4dvqR2tPwIlf/Tlf9jPjz2BsagwVr4I9R/aE53l44fIX4sMv/iTv7o8O/QRw8snAqlXAfe2HD/amFQiox8+86DOJq/y/r/pfvsydUzvx9M6n0Wg1eBj7QGUA84fmR24ymP1B9yB64ZEXAod8EI0D9kfTa4fcR0oixGLVarV/Npv4+g+AaR+ovSsI+j7/D+12jlx8JLa8d0vbEiXSqc0m/vtHwX7pvzve55H6CD52yscS1+UbZ38DzVaTP9jKtIPJjbtY5xx4DlZvXs1Jj/c/7/1YPm85b/+/XvZfeOCZB7gI86Nzf8SFt7NXnY3b192ONx/1ZgDAh078EA7Z7RC84qBXAAC+/LIv46FND/Hj9wtnfIFTaq86+FXYtO4BvOLznwY8D6sWrsKHTvoQ79dgdRCnLjsVpy47lbYi24DfXQ78/NZv4N6px/HE9icwPj2OsakxjE+Po16pY7Q+ikarEdiRpnai5tfwtme/LdbUktEl+PrLv46129bijP3PwKNbH8WP/vIjPD32NCYaE5g7OBcLhhZw+mywOogdkzvwp6f/hP3n7x9YGlstVJvAt/8X+O9PvhLDL3wZjlh8BA7Z/ZDIso7f5/jUB300m1gwDtx449745r9eiDcd9SbaNlG0AwB7bgd+/cDz8MD/9w4ctuiw2GQLpqq4+Qrg6pXAtz7yCrzzWMWLqlAcvvty4LorPog9XniOcpELZi3AJ0/7JD5x6icw1ZxKtEy+5KFoH1XLA4CXPARcN/lqNM7/ayWRBgT7zuOXPI4f3/9jTknK9YZnvQEvW/kyfOXur+D5S9VU+nOXPBfXnHcNdk7ujIx0KJfuHMTqK2d9JXWaf37BP+PCIy7EIbsfYvVWW5Xb+Kw9nqWdh13vv3DGF7QC3OsPfz1eceArEgUjkxodHE0UIVm9+pBX47lLnhsjKvMsRisnled5kYwuXcWEG0EQu+S4S3DJcVFi65i9jsGdf3snraMCUbVg1gJcdvpl+NjzP6a2xFdquOpVgV1Q3E/fedw78eaj3xwbAAUIRL2Pn/rxyAsbAHjO3s/BPW+5B5ONSYxt3oDR3ZYE9xH7C/NnJMRUVfErXOiiFrsnYtciuUYHR/E3R/xN4vz/9bL/MloeWyarNLIGoJEuxiJIgoDTbDX58qg5VdogdaIIkrY83ciQlGnEz8g2wJTlpfWdZPETpqFsB/J3QxgQgDTggWZbUfYFCuEnT2fSlva4IZCOkeWVhFhZhSjfJ1smc6skyyQb+jo8uHi4faUaOXkCiFgm+YHp+6g2g4cHUjFBTLhhaAibRCuIKQgxnZi2auGqdqZVOMTe0EQTxzwOzK7OAibDQHUmGilOUIw0g+9j4Rhw9uNzgIOTbSHyeu6zo4J9VDcxmz0gHFkOEAmxuCA2VItm11T9KpbOWxp81vwfYApgKWx++GaSbVPWhmwD8TwPI/URjMwO7HmqmzAVMix+nmttiobqs+VX/ar27SgQrFvVa2dMsYcOlSDGCbGK394PqrVIW7LdSVkGo0ymled5GB4YTnzQFaejPIQCAFotVFrgeX9otdpvpNnvrFTrIBKmIWHDAvZVYlrkoVkU49mNTAuJN/668jzPKM+EWnPqc/DR53+U/37AggMi4vALl4cZfGGdufJM/v9Fw4twxcuv4L8vn78c7z7+3fz3F614EV6EF/Hf33L0W/j/R+ojePeqC4HNnwZGHRxXnodZUy28dN8X4KWLFmVu7nWHt229h+9xeGS9ScX2hwZwUe25gNCecYVt7be9gn88
8R8ztwMAK3cNY+XKlyVO5wE4837gzJdeAQwrjsdm+1zy4uFnAQThJSmrK3I8JB0bwuenNfYFVrxIPV1Yi0cW481Hv1k7zfyh+ZH9NalU4e95VK1SSySP8i4Kjdap7cBqyailNbiIJQhimYu1IRwTSbRkMLn6emkrbg5UBjAwIIhvIhWWkRCbySU+oCcRRmTShWBPVGWIUULu00bnS6We0pbnSctLWz+CAEdZHjUoPi3fDaDRbRSLX5owQ9pWBt8NVYDTEmIK8dCa3qPYSw0oMrHvtsdNv5SxILZ69Wr88pe/xKOPPoqxsTHstttuOOKII3DcccdhcJCOq5eVUEUgxFIsk+zgaoQ/K0JWDj9pqEL1TYUAKUMMiIpase2TQIg1w6wtcV7tg7I0KqUfCoFNQRDT5iSY3qywUR+nptR/F9YLEAmx9iQqci2xX2FVqsHDFlun1DYUN5Wsui2IiaH6uotbWsn7cYQQY+CTYAf2bd7uarZjIUrulyyIGRJikc+pYpo87wy/8XdWrh8S5e3czUrbr2zactVOWltp+7U8jct+JS2vPH7KmsmVhyDWzeNAXLYogsmjoFMqhRCbKaV8aE8QZgAi6dJKFlSoI+rJA4jZjs6nClsnLS+N5MkoKilFniR7qaYt03w3Y5pOsx1M892yDtYQEQ+zEGJEeo8ktBJyxgAaTZdm6+2HIj/NffOb38TnP/95/Pa3v8WiRYuw5557YmhoCJs2bcLDDz+MwcFBnHfeeXjve9+LfffdN73BstTl0UP1c6sEQUwO1Wfh9pWqQhBThepbCkURQkw4nk0JMS1dJpZ0g1IJM8xEQYw0P/VmhQlik5Pqv8uCGLtQCaH6SXSXsl/s19oA0Gxv01Sxr8CCmDeLdU0/hHJaaQUxdgEVLJO+wRDj7YW4I8RyKZUgJlbag7ZEiEU+p4pp8rwz/MbfWUnngkzl+/Ht3M3KQzDKeoxRhC75b0l9dylQGRJihfmOyyqLWjkTYh0vcdlMEKvV7NavCAKfg1ISRraki0QOye2L0+hEM/Ez0qiPGkGFKvJw0YUwqiWF2CLncAk2QG2GWEJbygyxtG1lkremIZq024pAUFHD5E3y3Sijl6ZllpmSgGl5cgDtuylD9YmC2BFHHIGBgQFccMEFuOqqq7BkSRTLnpiYwK9//WtceeWVOProo/Gf//mfeOUrCVaxsuLl+5z86Rq2mJIh1mg2om88KtX4xUhHiBkKRSxUH5AIMfmgTcoQCzPIRDGNRHiBNdeen7Xh64Qn05uVkIQjC2L84iI0wftlIPTV6sCEQIiltaFar7Vrgauvhj8U9EYWxDpyYt28GX4YBcOCxAH3gpgYqq+yTJKrCDfnupL322YTEHL8UkUCHSFGFdNKQkxdbNu6EMSK9lBFFZ9M2ioSIZYHAUddXlG+47LKolavCWIqQsyGMAd6mhBLoqwAGumSleRRtWU7kqEpsUUSlagEnMHyVMs0WR5AH/UxLfMqVTCS7J62OVxKcZSyPBf7AkGAI5OACWIl9btRiXklIaapSy+9FC96UXL+RL1ex8knn4yTTz4ZH//4x7FmzRpX/eu/8rz46IydrpQMMVF0AKKWST5qiTjKJDsITckYhSBmlSGmsEyaCEe+LSGWt2VSRYiZCH21AWAiPUOsPYPiJuzDHwa++lX4lwajqjUb02juCobi7Jigu2kTvMWsa24IMXYBUYbqVwRBrOI+ELfrZUKIqdZBR4hRxbSSEFMX22a98pAoVh6CUZEIsTwEv04tr6yyOl0uBbEiXHOTCDGbKtq520FltUzKuV+q6SikkqotXVC8bnQ+VVA8JUNMa20T+p4YFK+ZRpyOHBSfYgkFoH0RTRGfqKSS/NJatzxd35UZaYTlkcVKB/tCks2WMgABxY4sr1+/h+qTnhh1YphcCxYswFFHHWXdob6vAlsmxbcKEVtcpaq3TMqEmHGGmIVlUiTE/Lhl0sRaqLJMaoWnTlkmxROeiVAXVmWgHpnXKkNsw4ZgnnAdmp/7LJpHH5XeF1c1Pg6Mj3ckQ8w5IVbUh9Q0QUz3oC0/qOsIMZ2YVhJi6nJJiBXhIVEslwJoAuVsXL1CiBXlOy6rLGr1AyGWVRDrgetiLMDe0jIpv8wUP5N/T7tPpFgmVUJC0vLScqNMLZOZRReKeEggh5QZYmnElglNp+k7hUjT5XCpxFHb74bvCyAQYsQ8OZ3t1dheqqHbKKOJ9ks5W+vp6Wk89thjrprr3xIsk10nxDSh+uJFR5khJlgm+cFlelMSiix+rT3SFjlUP0KI+bF5TcLnjQkxy6y03EP1VRliiGeIpQpiQPs73LkzmCfsTPOhB9Hc9Ex6X1zV5s3h8lm3cgzVD6dpJYwySV9IwYQIuVSWyaTf08QyU0IsadqibqtOVx6EWFEeqvLIEHMlrIlt6panW2ZJiJVVFr3yEMSKEKrvee0Igj63TALpFjil9UtD1kw3p2Ofyb+n2eRiVFCK1SxtpD9TC2NmUSlFKLG2l2rESnKmFyEXi0LKkXOxUkLnje2zmn1PS4gR9pfEvuv2K11WnEQe2g5A0C/l7In1z3/+M5YuXeqquf6tIhBiCW/XxVB98c1CpVKLqMwA1KH61pbJNo1FtUxGVoeF6gsfkwgvABgcbJ8QPUF4osxvKog5IMS05Ju0P1UGBiPzsu2TGqoPtPeRsbGga6wfaNFEQ1e1aVPQNbZ8Yd/UfscJFRs4Ii1U38YyWYSbc111ixDTzVvUbdXpch2qDxTnocqlgOPKMpkXIeayXyUhVlYvVq8RYiLdy+5pS0IsnRATHs5NrG3yvPI0FFEi60iGlKwnsQ9aAc5QVDJZnthXeXkUUgkwGAGUIubpvhuZ3rPM4YqIWIRtRcmvo2SksemSlmeyH5Oz6TR9V5KApWWyrEKU5xWHEJNuHsSDS3yz4Nf0hJi1ZZKPMtm+aaBaJsXlqjLEtNtWfNgcGmqvG5UQM33Q7FKovj8QEmLs3ipt3cT5ZUGs1b7AdkMQS7RM3n9/YKskFskyKWSIVaoD8UZSF1IwIUKuLBli8oO6LBToHuR18xZ1W3W6eo2aEMulgMPmLxIhlsf66doqCbGyZnL1miAmEmKMDCsJsdT8LNMMMR31RBkZUlymVkgQBKNUWx6V2CJYNHXLsxVKVMukWEKpAx6oxDxdLpYJvZeaIZZiZ6XSexSxUivAidtKZ2FUiJVJbZFHLzUdEKBPCTHy2fjII4/U/n3c4KGzLE35Phc6uubjTXi7HgnVFy2TlQF9hhg7CE1vSlQZYqKoJR/cCpKq1Wq1CTFBqNEq4KLwM2tWmxoSBDFSeH1elkmm6JuG6ssZYnWJEEujzHSWSdaPThNioWXS48+/Qqj++C5g1Srg3HOBb3+b1BzJMikSYjZveIsmRMhlIoilEWK6303osqJuq06XS0KsCA+JYuVhmSwSIZYHAadrqyTEyprJ5VIQK4KAVBJiyooRYgnUE0CndOS2VdNQ7JCmlsmkYHPTTC/boHiVUKLNxdKJhwSLn5J6StlWaXlkVEshlUhLG/AgbXRF17QgYD5aqm0
WHkVoVYm2/ZohRhbE7r33Xpx77rmJtsgnnngCDzzwgLOO9W15wiiTBbNMJobqV6txVbtaLQ4hJhBeyvnkErd7JwgxU8skX6/2JFYZYmGovlWGGOuTTIh1yTLJCTG0xVp/MhQYV68mNydfjNSEmJ/NMlmEm3NdpWWI6YQL+UFd97sJXVbUbdXp6rWHRLFcft8lIVYSYmXN7OpVQsz3S0JMqDTxyZSs0YXqq2xytjlVtpZJ0vLSbIBpy9MIQZHlUQmxBHIoIlZSt5VJ3hqh76n2UgOLJkVo1Qp+GrsnddRHOcRfNZ0ys8xS2KXaiPuhyGfjQw45BM95znNw0UUXKf/++9//Hl/60pecdaxvq0iWSSC44IYHh5ivFFGvhVB9dnBCsJXllSGmC9Xny23FM8S0OVtiP4GIINbwDa2JeRNi4iRpdFcwY+TXSn0oMm8qZaazTLJ+oLuWyQghxvaP6WnVnMoS+xxpqyUIkb7fFiBtLJNFf7tbEmLFrTwIsaJs2zwytkpCLL7sssqaCdVr9nBVqH5JiMUEgKwjGRoHxROILerofFprG0G4oJA8xqIS0ZanzRBLCndXWCZTtxVBzCNliFEHIKDkrWX8bowJMaKFUe6Dcnkam6PRgAApNuJ+KPJd9fHHH4/7778/8e8jIyM48cQTnXSqr8srQKh+wo20ihDzmwHBpbVMsoPL9C2dghCL5IClWSbZSbhiSIhJghi3igqEGEl4MhXEpqfV80gjy8nZaKxvgB0h1gw/ThX7ZMtkqxUfZbJVnFB9Log1GooZ1SXffCktk76fzTJZ9Le7aYKY7sG+JMTyLdF2k7WKth8W0TKZFyHmsl8lIVZWL1avEWLiubskxHilEmIqssYyF4uS9aRqK00ESbUBiiKPrQCnooIotkMX4mEnCDGVeJjSlqod8TOdddR2W2nz5AT7JSVvTRvi3yLuxzoxTxKbdQScLpuuX4p8Nv785z+v/fvy5ctx0003Ze5Q35fvF4sQazb5xVc8EfEDrAWgWtWG6vOD0JSc4hliccukUmyRBDF+oLMTTBoBxarTlklRsJmaAur16N8JhJhNqD4jxBqeBwhWR3KG2OQk7xtrudFlQkwpiBkQYuK+IQtiSltopQczxNIsk7qH/zRCzIQuKwmxeEnieKYq2n5YRMtkXoRYJ/pVEmJlzeTqNUEsD0KsB45rUn4WPLTQIhNU/DNbQkwSJXSZXqIoQcoQ0wlwmuWJAk6avTSVejKx5QlPGkn9Ssuyomwr1fqlBfTr+gTQ6DbTbWVrO1SJh5T1U7VFJcRiNB3VXloSYmUVoopAiCXc3EdC9cMDrBIKYiISC0Adqm9rmRRuGpiopTxgk4QjiRAzskzOmtW+WHmGWV2mhBigtk0mZojFCTFS2D/rJgvVZ4RY2rrJGWIhHQYIglSXMsSUofoWgpiWEGP3oOI69iMhZjJSpI76Spu3JMTi5ZIQK9pD1UwmxCh9z4sQ64RFs6yyOl29JoiJ524miNkSYkV7mZGhYoSY5YO9knpKEnlaNNElqwiisu6lBear+i1+phMuVPZSX/GYb2rLo1hHqdtKnk81TVZ6T9xWFHqPIq5R9k8KvRdpi0g66rLwsvaduo/2Q/XnWhe5PK+zgoKq0iyTzUZEKEBNYZkUCLGslkkxuFy7bZKEo/B3EkUlzA/AjhCztUwC6mD9JKFPOLcZh+p7Hio8VD+8eKSJanKGWJgfBnRfEBND9dk+WGEbyJFlsm0LFSi4Wh9miOke/k0IsbR5S0IsXi4JsaIJs3kIRp0ixDqd6VUSYmX1erkUxIpwrlOF6tsSYkVYH0dFsQvKIoFOVNIJCaaiC0Wg0okgVPqGEhSvop6ShJJI3wk5XLq+p9k9STlVBOqJGqpvIo7qrLEqW2XW5VEsoUB2y6SSdLQUK5XboU8tk6UgVrQqomUyrEiofniAVZqIWCb5CcglIWZpmeTCkS8RYoaWSb7eHrEN21EmAZogxgkxYRIK/RYj39rrBVhkiCkFMaJo6Kq2bAm6xrvljhCLZOW1pLcybB1tLJNFv5l1RYipRK2SEMtWvRyqX3RCjGqZ7ESmV0mIldXrlQchVrRQ/ZIQI4Wkx0QzgjAjti23kxYgbjo6X1r+WepogCBsA4JwEbl/Jeafye2rpjEKnbfMd6OG6pMIMYK4pgzez0ikUUYABYikY5O4HxPEyqzCbr9UKYgVrYpgmUx44FWF6jPLpHiwtxAIYnyUSXZw2WaI1eKh+sptk0iIhSdsqlDTTUJMZZmUbFJtQkx440Ch3xIGC2DzGmeIRSyTbcqso4JYuL20GWKOQ/V7nhBLyxCjEmIqUaskxLJVr1ETYhVdEHMZqt8JQqwUxMqaydXLlsmSEONlEihPzUKSP2NlbMujjs7XUhNbytyvjAKcduRE4XcjmyNx/SiiC8USquoXNVTfOEMs4buh0nuyWJlmTUwM1Ref13Sko+l+TBErDfteEmJlFaM8r/uEmEGGGAvVFw+gVqsVtUx6/D+xNrXFCDHBMqkVtVIIMSb8GGWICYJYwzcUnvKyTPK3A8IkFHJNPMkJ2WhNdtJME7LkDDEVIdZpQawRPdHH9k3AWYaYj/YNAstdsxLEin4zmychZpI/VhJi8eplQmwmWyY7TWxR2iotk2XN5Oo1QawkxJQliyVZs5C0ZI0ie0krghBG59MJOCrRRdcWVShJEi6UFBLF5phmA9Tln1HoNmkaVVsUmyO17ybbKm10xdi+pxGedHSbMkPMlnRULY9wTGj7niJ89kOVgljRyveLRYglZIglWSaB8GCfPZuvB/+bpZVQJMSmTSyTEiFGFmok4YgLgVSxJ+sok3IRMsSM+gXEhD4AbZEnqQ1KhlinBbFw23h+/E2ce0Is/ubGr/bgKJOuMsRUopbuQb4kxNKr1x4SxeonQixrv0pCrKxer14715UZYsqi2CFNKDJTskYrUBFEF4oIQrVMksLPNSIImUKSbY4pVrqsghHFMmmcIUawhGppOsXybLeViTgKpOxXhll4pMEhCOJo2nboh8q01m9961uxceNGV30pCygGIZZimZSFAqUg9prXoHX6SwAI62FCTokPxNV4qL52lMnw7VuMEKNQVGI/gUTLpJYyy2KZtMwQMw7VnzVLmY0GaLaPzjLJutolQcz3g32kmTFDTPxekwix6Wa7vZ4kxNIsk64IMVMxrSy3hFjR9sM8BLEiEWIuia2SECur16vX7OGiZfJ5zwMWLwZOP92urSIIfI7KxC7IRR7NyInkDDENhUQZ9ZFih6QGt5MEOIKAo6SQCAJcKgGnsxRSxDVVUDyB2LIWKwl5a9Y5XDpxVLNfmea7kQY8IOafkfqekt3WD5Xprvp//ud/sG3bNld9KQsAvAKMMplkmfTilslKC0CtFjlgm60msPvuaL3udQCEg8tEKBKpnmq1rXR3ghBLIqlMCbG8BDENIUYS6oAYISYKYjPKMskIsVAEjRBibDc2IMTkUXpUhJj45qay+yLzPs90QkwniGUhxNLEtLKiD1VZq2j7oUtBjM1fJELMJbFVEmJl9XrlQY
gVJVT/8MOB9euBCy+0a8v0HrPAZZJnRSG2spI84nTk0fkSRAmlaEbJSNMRW7qRE1UUUkYBjhw6T7Rfqvoq/p6W72ZM06WIh+SRGg0pKy29p8n0oozI6VKspB4T/VCZ7qpbPfB2onBVNMtkQqg+f1OjIMTYQRU7uEze0okiUaXSPrB1YkvOGWJNz5DEMrSGAqBZJvmFsV3GofoJVlBAs30KbJn0hXw7PuCDBSEGRG+GVKH6EULssMPN+1yEt9W6ymKZLAmxfEt8qMpaRdsP87AUloSYm+WVVVanq9csk/LLjCzrVYT1cVRGljtXIk8KDWMigpCD2wk5XOSgeMJIhiQBjiqUaPLPKHlrJiJP2gigRnlrSBaMbHO4dLbKNLqNYv01sZemWXGN8s9SRNt+qMxPrP264XKrolkmhQtuRSU6JGWIQXEQmrzVEkUMgRCzGmWy0ha0AIJQI5FUKmshKbw+b0JMWBapX4kZYh4X+gAiISZbJgsiiEUJsWyCWJJlUhcMSqoivK3WlU7kkn83JcSy2C3LKgkx07ZKQszN8soqq9PVa4KYy5cZRTt3ZyhKqL5NhliayEMZFZEq8qRRQS7Dz3VB8aoMMWvxULTSEQQq4wEPErZVKiFmYLHV5Z+Z0nvUHDVK3ynZbU4IMemZJYs1th/KaIiTpUuXRr6Y8fFxnHTSSagKGU+PPPKIu971Y3le8QmxZiNqmUwQxGIHl8lFXBbEPA9oGVomubUwSpcZZYhFRmM0tExSb75MQ/WFCxUqFWB62jxU3yZDTKyCEWIsVD9yo8OmMbBMAsmCmMoyabWORSNz5NKJXPLvpoSY7iG9JMTSK4+HqqJs2zwyxFhbtturJMTKKqs71auCWC/mP2Yo0Qoo/h6ZxoBo0opKou3QROSxJGuU1JOlZTLSVoJwEcmpMrDlpW0DL3y5TOo7YRpVvygEnDid1l6qsrMmWDTZdKo+kZdnsF+JkUMUGlLX9wgAoOs7cfCEfg/VNxLErrjiCv7/VquF008/HZdeein22msv1/3q3/L97hNihAwxOVRfPLDZyTp2ATC5iEsZYmKOF0AUxGwJMc1ojEbWxNwJsbYgxsU+mwwxr71eQMq6eV5b6CgCIdZgF/1w3+wAIRaxTPYDIWaSIaYTwGSBzFRMK8stIVa0hyqXAo4rQawkxMoqqzvlUhArwrmul+neDKWjnFiZEE3krCdHOVU6skZsx4SgSutTknARyRDTtGVkAySGzhsHxVOoJ0pWnI4QI4bcZx3h1LrvBPFX1/fU/DOCWEntez+UkSB20kknRX6vVCo49thjsWzZMqed6usSCLGuqbQGGWI6y2TMd25rmfT9uGVSJRamEWLh4lMzxGTL5EScpEoVjcT+6ErOVTIZZbLV4p9ZEWKmGWKsjUajeIQYC9VvtYSRiMJi4grxRjRJ+GXbPbMgVoSbc11lyRBLs0iWhFi26mXbjUsBR27L9iE0L0KsE1bOkhArayZXHoRYUUL1s1bR7yEMSkfuyJ+Z0DBpWU9GFBLRUphEPbHp0toiB8UntCUPCpXUlqlQwv5PESLJGWIa8VBHKsnfDdXOGhMPRXsphdgiiFim9lJrIs3QikvZF9Lotn6oTE+s/aoi5lqep8/J6kQl3EiLBxc/Geksk/LBZWOZDO24niRq+aoDNpEQi4pptoRYJEMsTTQCzEk4gGaZlAkxWITqy+SbsDlJYp8siDWDde1ahlgl2E+UhBhgZJtMtkwG5YwQA4p5Q6ujvuTfS0Kss9XLtps8LZO25ZIQy5OAU1VJiJU1k6vXLJMlIaYsHbkjf0YRJagkT9ZRHylkjUqg0maW6UQegg1QZZm0FnlEwU+GGxRtmYT4q6ZTZnpRvucU8TBpusi2MhjhNG0bmHzPJHE0jeqijJBJoek0eWv9UpnOzC5Hmbz00kvheR7e8Y53OGtzRpZXsFB90TKpCtVvAajVIn2NZYjZWCZlQYzROWETVoQYE7NMMsQka6FzQky285kSYpU25QUQw/4BbYYYWRArgmVSQYjFMsQAJ4KYs1B9ebTOopWJZdKUEMsSyF9W71ETYrn8vl2JQS4JsTwJOFWVhFhZM7l6TRDr5ZcZGSopXF31mVYkAIGsYffOoIk8VEshSXQhimtJ66fqe0yAUzyHubABkmgz4oiVqr6K7aRZ90j2UkJbEXspxYpLzaYz6buOTqSIlVQ7K4Wmg3400X4oI8ukXNu3b3fSiTvvvBOXX345DjvsMCftzejy/e6H6ifcbEdC9SXLZCRDLFyD2FsME8skEzAkQoyLLSaEmImtMFhY+/9SqL42w4yVzXqyMsgQEz8z6heQmCHmwdPvd+JDdIEsk2yUySZEsVZYD4McMfEiEiHEVG+5bERrcftmsXPlVSaWSVNCTCd6lIRYerl8qCrCQ6JYeRFUun2n0Qi2ZdI5jypiUcS8khArqyx69aog1ivr46h0QgUr0yB1VbtiOzrKSvwsK6VjLbpYUk8qGyCF6krNENNQSDb5bqplUkP1TbLidDZAlauJRMCl0YImfSfQiU6INAd2z36orj+J7dixA+eddx6+9KUvYd68edppJyYmsG3btsi/nquiEWLCBVcXqg/ETxyxi4lNhlhI/vATie+FyzIgxHw2b3Q9EksSjlRZW2ThKe2GRRZqCJZJ3rRgmTSycgKJGWJkeq5gGWK+H1omRUJM3PQOCTFmmUwVDxMXUHBCLItlsiTE8i2XtpuiUQYuBRyKYDQ1BRx6KPCiF2VrR56uiIRYKYiVNdPKpSBWhHNdHufuHjiuSYSYgUBFfvinkDwZaTNqphdJVFJZJjMSTcaW0IwClc4yGcn9ykqIKdoiZYjpCDjCd0Pdr4wEuLTlEcRKFwMC9EN1XRB729vehjPOOAOnnXZa6rSf/OQnMTo6yv8tWbKkAz3scHle9wkxSqh+eHKoaASx2EnU5K1WQoZYsxYKZKomiISYrWWSLByZZERltUya0G9ivyIZYi0aYSa2IVsmwy523TIpZoiJFzcLQiwpVF/35oZUMiFWtDIhxHSUVxohphPaSkJMXf0Sqt8Jy+STTwL33QfcdFP2PhWdECuK6FlWWdQqCbHkKsL6OCoKISaTLjrrF5Ws0ZI8MlljSZupcqrS8sHS+k62e1IEKsNBA2xFHnn9VP2iUF2RtkDYVppcLGWGGEEcdbpfZRQYqWIl21apwm5JiHWvrrzyStx999345Cc/SZr+fe97H7Zu3cr/rV27NucedqF8nxNiXfPxJjxMiBli/OQuCGIRoQZtyyRfjyyWSXaSqFbCNk0IMQNbYTBj+/9S+LwxIZa2rhaEmC5UXyvUyYQYI/5gYSeNEWLBd91xQSzcT9qWSZEQE/ZjA0FMlZWnGmXSev2KToiZZIg1m8E++/WvA489ZkaIpVkxywf6ePUyIdZpwYhNQw3LzzqdS4G3JMTK6vXqtbxEl+fuIqyPo9KJSPJnzkQe8T6RIK6l0WZJoosy08vSMqkaOdHWOmqa+6UT4EzEQ9EymTQAQRqpRNpWKppOJhGF3ykkIDXTy2gEyYxUF9nOShR2+
fr0KSGWKUMsS61duxYXX3wxrr/+egwODpLmqdfrqNfrOfesy+UJo0wWwTKZkCHGc5qaSCfEslgm5VEmQ0LMLEMs+NjaMimEzxuP5pg3ISZZJq0yxAShj7xtms3Chuq3R0AV9hGXofqaN32k6jVC7JprgDe8ATj3XOADH4j+LU0gE6skxNKrJMTM20oTjLJmg1Gn67QltBSUy5rJ1auEWC++zMhQupEZ+WcEyyRl9ME8rG1MkFAt0zRUnyKUkO2eBiH3toMGKPueIuYllSpDLKuopAu5t84Qy2A7NBFaKTRkKiFGyCNTZSOXofodrrvuugsbNmzAkUceyT9rNBr4xS9+gcsuuwwTExOoVFIeznuximaZFC644kEfsUzWarG/A4gr8zaWyQojwsK2q8wyaU6IkYUadrPheUC9rrZM6oQjE8EjQ6h+C4FlsgXQqMKkDDEQhb5g4cFPmRDrsmWSZ4hByrdj5cIyGa5XZstk0Qkx0wyxTZuC/2/alG6R1D2kl4RYeuVBGRRl23ZaMJKn0VHHaX0yJcRcCn6U9SuK6FlWWdTqVUGsF19mZCgducPKROTJanMU56VQXaLIo82pIo6KSOmTyQiSJAKOuq0c5buliTxZR7VU0XRZvxsqLajbr0yEVtKgAS0ikZZxBNB+KStBbHp6GjfffDMefvhhvPa1r8XIyAgef/xxzJkzB8PDw6Q2Tj31VPzxj3+MfPbXf/3XWLVqFd773vf2pxgGRCyTRSPE0kL15ZO6U0JMtkzq+i2TVOyExsQsaobY0BDgee2TClXsyTlUXybEmsJuQhbqkgixtG1TNMskE8SqTBBzF6ovkpABIRZ8ziyTqdsqqWY6ISY/2LNtK1sigeh2T6O+TGiyfi2XD1VFowxcCqA2FkbVPcdMIMQoIf5F+Y7LKotaLgWxIpzretnunqFIhJiBQGUahJ+VstIFxasIMZJw4cImRxnJkGIDFIk0wnbQUkjUkSFd0XuavqsIMYpglIkQM8kQIwqoWcVK1QigpWWSWI8++ihe/OIX47HHHsPExARe8IIXYGRkBJ/61KcwMTGBL37xi6R2RkZGcMghh0Q+mz17NhYsWBD7vK+qCIRYwo17JFQ/PFhVlkl2gMZOMuJFPOmNPCs5Q4ydSKoGlklbQoz1a2goMr3VKJOmGWIWhFhDWJwRIea1c7eM89WKapkE8iPEwr9ntkwWnRAzsUyKIphMdQFRQUz+uwmJVsTt1I1yabspGmWQF0HlShCbaYSYSwGurLI6XSUhllxFO3dnKCNCzGAkwzT7G4WyIolKopCgWRdXVJdxpldGi6aOshLnJW1PF/SewXcT+Z41+W6U3DkK3SaKlbZ2T8r2VIqHjo6bpOn6oYzvqi+++GIcffTR2Lx5M4ZCwQAAzj77bNx4441OO9eX5XnFJcTEUH2e04TkDDFZuRYPfio5FSPEQlFINXsiIRZ8bJwhFu7ffL09YjC/CQFkQoiF7WoJMWqovosMsfHx9sdFsUwKF8CK4wwxZ6H6RSfETC2ToiBmQoilCW+ymFZW71ETYnXTMplVxCoiIVYKymXN5MpDEOvmdaTMEFOWjqri01BECZlC0ogN5JwqgrhGJsQMQudd2PIoIg8pb41K05kExWuWl0ZZmdB7upETld9NRlGJLLRmFCuVAyzYkpUqYbckxGj1y1/+ErfddhsGBgYin++3335Yv359ps7cfPPNmebvifL9zgoKqkrJEIuE6rcQz/mSLJN8PWyshGHbmQixiiEhJglixhlieYfqi4SYJIiRhbpZs1Dxw5MlLDLEBLskAPjN7lomvUpwKsuTEPOlC0dJiCEqclEIMZWY9utfA4cdli6mldXbhJhLi58NIWbbDrUtqgV49Wrg298G3vpWYO7c9H6VhFhZvVi9Roj1cv5jhopRVZQcJ0vRxdTaZkJ1ie2r1sVoJMM0yyQhuJ0UhO+SpiOM+kgeNICQ70YWjBK2leq7IRF3aUJrRrsnhRakWELF6ajfs/xZv5XxWjebTTQUtMW6deswMjLipFN9XQW2TFamgu+92WpGLZNhqH6EXALiJzUbK2ECIabccV1liCVYJkWSypll0iZUX9zOvs/XCzAQ6kRCzGuZZ4jt2BFtuhsZYoIY41dSQvUNCLGkrDzKW0pSFZ0QM8kQMyXE5Af5664DTjgBuOSSkhCjVC8/VLkkmopIiFHb+vSngX/8x0AUS6qSECur16vXBLE88h974Lqos7KxIhFGBFFJZW2zpnQsLZNZbYdpAhUl88pEVIoIVJZ0m2movomolBY6r7V7yuKhC6E1o5hHIeBUGWKkwROIwm5pmSTWC1/4Qvzrv/4r/93zPOzYsQMf/vCHcfrpp7vsW39WgS2T/mc/CwBo7BqPig4phJjSMkkViuTA/oqBZZJniIXzmhJis2YBEAQSj0hS5WWZzEqIJWWIeUQraLDw4KcsiHXDMinc3HYiQ4xRiZktkyUh1v6dUcXr15eEGKXKUH1auSK2XBJi1La2b4/+TGurJMTK6sXqNXt4L7/MyFAkQiwH4cJkBEknofomlkkqQaUTuwzsnmSaLkWcSVqeyfZMy+Ei5a2pMsQIfbfOLBOF1ox2TyOaThhEjLQ8wjRJ0/VDGZ+ZP/OZz+BXv/oVDjroIOzatQuvfe1ruV3yU5/6VB597K8qAiGWJIitfzz4qDHFD1atZVJHiGXMEPOZathsAh/9KPCznyWTVH6b8ALMM8QyhepnsUy+5z3A0Ue3w+uTCDFhceR+JRFi1G0jBOoDXSLExH2z0h5lkt8IZBTEGq3oKJNlhpji9yyEmDxvSYilVx4PVUXZti4FHFeWyW4QYuIxkTaNbjqqRbOssopYJSGWXEU7d2coE0LMREjQCSWi1czaJicJJap+mVomqSKPru+kPDKCwBjJEDPIW8s68iWVgKPaS+X2rftOtB0aEVu2opmKprMV/AikY7+UcYbY3nvvjXvuuQdXXnkl/vCHP2DHjh248MILcd5550VC9suyLN8vFiEm3EBUGqHo0WqhwUQBQRCTD77YyTbNSrhhA/DqVwNvfCMwOBguVGqb/c769bvfAR/5CHDgge12ZJKK3TuYUlC6DDGdtTALISYKYt/8JvD448B++4UdTyfESP3yfWBgAJUpQ/JNbEMWxLqRIZYgiLVFLOHm10WovucBLQeCGBBsR1kgKkqZWCZFQqzRMCPEZLqsJMTSq1cJsbR9zrRcWSapIh1lOlPajLo8CiFWhO+4rLJMqgzVT64inbszli7snE9DELtMxI00a5ss4GTJXvLgoSW8qM0zKF78zESgImeIEYRBW2FGmYtFsXtmsAGaWAqpAx6Y2Fmz2jiN7ay6768kxMwFMQCoVqs4//zzXfelLCBCiPnd2ikTbtyZ6NFAE81GIApUmoiNMskO0JhynSYU/exnABtY4S1vCX4mZoiFbTFbybZtwPBw2NEEQowiZgnzxzLEqGIPRRDbsAGYN09vmWT/37Ur0i8Rz5UFMa2IKq6X50WEPuN8tSJYJoVt64X7iXhByssymTlUH2gLYkV8w2timTQl
xGQRgv29JMRo1auEmGtBzJQQSxLMTaku3XQlIVZWWfRyKfYUgRDr1XN3xtKFnbOyopA04gY1mJ5EpKUICZ7npQolroLiAZp11MQqaBrcbvv9KUdOdLQ88TPddBThM1VopYh5BqNMUmlBEglIEOmSltkPZSWIPf7447j11luxYcMGNKUT8tvf/nYnHevb8jwuKChzsjpRCTfSbUKsiQYTxCiWSRUhprox2bAh+DkxEcsQ4yeJCrNMhvMw0Whigmd+xQmx8KRAFWokQYyJROSsLc/TE0Br1gD77w+89KXBSGJiiYQYW7fxcfV6SaH6vufrlX0pGy2Sb0AlxFgbRQjVTyHEKi1hW7ggxKSbpEzr5/tqEagIZWKZNM0QS8ofazRKQoxSedhuirBtXQpi8nm3KIQYVaAqCbGyyiotk7rqYULM9sHeNGeMIrqQ8plSrGa+50fvJbNmbDkQ16xpuhSro5P1MxxFk0IUUvtuK2KZDtaQdXnUvDWK4FdaJttlLIhdccUVePOb34yBgQEsWLAg8oV6nlcKYlmraJZJJSHW4oQYJVSfH4Rplsmnnw5+Tk7GM8S4ZTIUhdiNgEhRJWaIhQd8uPjUnCy2T0vCkZHYoyOA/vCHYP3uvZdGiMmCWAIhZmoFFbfDlKllMkaIdVcQ80JBTAyZjFgmDQgxLoDKhJhryyRQzDe8nSTEdGJaSbjEq1dtN7rvPq+2XGaIUaZzaZksCbGyer16TRArCTFlUQgx16PzRUQll7Y8iqXQMqdKNbIghW4jZVnpthWIhBihLapF02gEUAJF5qItJ4QYYbub2BxT7awWx03SdP1QxoLYBz/4QXzoQx/C+973PvguTuxlRUuwTHrdungnZogFnzfRRKMRiDWiZTIS9g5FqH6alVAliLHMMHYiqQQ/eag+I6omJuKCmCNCjJ8wfJiFz6sEAgDYtKndZ12GGJUQowp9GkIsjBOji4WxDLHgZ0cFMUFsUWaINdXTplUqIebCMlkkMUIukwwx02D8LGJaWb2Xq8NK7kNWQkzXtupzCiFGDdV3Ja6VhFhZ/Vwuz3VFuN726suMjEXJEDOxAZJFF/mFvWI6F7Y8EmFEIZXEF+HSZ8p+uRR5DEZ9tLZMCs81RrZDAkWWNp3J4Am671jM9LImtgxpQSM7q0bYLQkxmI8yOTY2hnPPPbcUw/IqzxMIsS5VUoZYKIg10OKCGIUQI1smN24MfopCkUSIJVomp6ba8yQRYiZiFhAjqYzEHt0NyzPPBD9F4Y+VShAjZoil9mnffYOf++8PIJoXZk2I+ez76LJlsloLPnJAiIkXkVxGmQSKJUbIlSYoJNkeTQkxU7tlWb37UOXSMkkV11xSViUhVlZZbqvXxP9etbtnLMook85EF6K1zXTUR13fKcSPjYhF7RdFVNIJM2k0nY3okrZ+JFHJgLKiTmcrKikHICCItlnFQyotSBU+5c/6rYzX+sILL8T3vve9PPpSFhBYJsP/FtUy2UQLzfBhlZQhxtbDxDIpZYjJhFjMMgkkklRMoOEklWWovtVojKaEGFsfccQ+HSFWqdByzQBg1Srg/vuBb387Nj0nxKjbhgli4UAG3bdMBv1uAbkRYvLFM1VY1S6kQGKEXGmCQhZCLMu8ZfWu7cZ1hphYJSFWzPNMWWVRqlcEpF49d2esWIaYraXQgKyhBtMbiy62mVcUUkklwFkKVKYCnDa43UDkoRBwqaISQeRRDXhAEQ8pGVupQqujAQ8opKNTO2tpmTS3TH7yk5/ES1/6UvzsZz/DoYceilqtFvn7Zz/7WWed68sSCDG/W9fuJMskEz3kUH1ZgAoP9thBb2OZjBFiwc8YIQYkZ235UWEsVag55RTghz8ETj45Mr0o9pCFI9UNGBPEdISYSIrpMsR8n94nADjgAP7fTBlizDI5PAxs29Z1QUwkxPiFRNz0DkaZZHuvk1D9It/Qmlgm0wgxedosdFlZvUsZuMwQKwkx+jRllVXU6rUMMZfn7iK/UDMsEiFGyPQiZS+pMsQoFkYd9UTNqTKxTBLaSesXRXwiWRhFms5B/llSv6mh+pTlUcVK08EF0vqeln9G2e4mxF2qnZVAAqoIsX61TFoJYtdeey1WrlwJIPqF9quq6LQ8YZTJbu2USZbJ6dAy6Qmh+kIfxRNa0Ix0UhP3D9WFnJIh5kuCmCgcjY2FHZFIKlPL5HnnAa99Le8vP0GZiD3shkVHiIkkXL0eEGOiBZSVbJmUCDFbAcpJhlgRCDHPg+f5QEsTqu+CEAuBWqeWySLe0JqG6rMSqUbxM3HakhDLVi4pgyI9VLnMENOJskmfuyTEKAIchRDTnbNKQqysXq9eFcRKQixSNoQYacTANNGFkItFEi5aNArJxDJJCcLP1JbBtkrLWzMh84wHDchZrCQNLmAiHqbkn5l8zxSbY+rgECbrl7Jf9UMZC2Kf+cxn8N///d+44IILcuhOWVHLZJcq4UGBEWIAMM1C9VvtXiZZJiMHfVLY/OQksG1b+/9po0yyvojCkfSwyEkqyTpJEjKEEwIfddBE7NHdsIgZYqz/Q0OBIMYEPgr5FhJi5FB9qTJliDGRjtlKuymI+T58vwI0wuWrLJMmo0x66lEmKSgzuXSCabcrTVAwIcTSRplkfy8JMVrlQYgVYR80sUyuWQM88ADwwhdmaysvQixrW+Ix0YnllVVWEavXQvV79WVGxpIfvnMVXYT5SLlYBKsZ1cJoIuCQRwMkWOAoNF1ahpjRqI8UYSZF5DGysxJzsWwD+inbijwAgcV3k7bvUSyTJhlw/VzGZ+Z6vY7jjz8+j76UBUC0TBZilEmREGu0/z/VCIQb8aBPDNUXD9QkIYAF6gOBMJSUISYTYqJwJC1DHmWSnCEmNydmiJkSYjrLJNAWu0JhSSmIsf87JsTE78U4Q2xiIvg5OBh8rBLEzE8vZiUIYl7YLzFUv5LRMtloNdoXt1Z7e5WEmESImVBeOstkSYilV68+VJkIYuefD7zoRcC996r/XlRCzKVlsiTEyur1KkP1k6sI6+OoYoQYxTKZUXQBUgQqA6tZmpAgi122QfEqAY7UloOwdZO8rqyjHVIHPKBYUNPoPfmZ1VbEEvtOpe7EtlXLIxFiKXZWkxFOnbzon+FlvOYXX3wx/v3f/z2PvpQFBIIY+2+3GLGkDLFG+/9T04FwUxH6KJ9klQdqkhDA7JJAlJxilklJ1OJ2OJ0gloUQE5tjJ3O/3QZ5pEqdZRJoWw9nzQp+qiyTUpvyKJPcCmoo9Hmex9fNmBBjwp1OEMv7xMoEE0aIQbJMNhXTEor1mwlfQGiZJNwk0RdSYEJMJU488ghwww3B7y4JsTJDzKx69aGKKmIBwFNPBT83bMjWlivKyibEP2uofkmIldXr1WuWyV59mZGxYhlilrlKJlldkekoVJelHU2c15UIkko9WdB0um1lGtxOsmhSxcqMxJYLy6QJZZU2WIPJ8qgDSLgixHT7S7+UsWXyN7/5DX7+85/jJz/5CQ4++OBYqP73v/99Z53ry/J9gRDrUh+SMsSE/zNCrCJoqiLyKv4
kEWKiINZqtQmkkBDjB21IiHHLpJghJi3DOkNMKm6hMxF7dA+boiDGcs90hBgrFSEmhOrbCDS+56PZappniLHvp14P2imCZRJAE8LbHnE/tiDEZEGsbwgxlaDwmtcAv/lNYFVLetAuCbH8K48cmiLsgyaEWJpgRG2Lsn+p2pJvKqnL6yYhVh4/Zc206jVBLI+XGUU4d2csEiEmiTwuRBdKrpIJRZaUu0SivwgUmTLriSJ26cghou1QSyERaDNSlpVCrLT9nk3z1lz1nWwvNfiede2Ifae0ZZtZ1i9lLIjNnTsX55xzTh59KQuIEGJdAxeTLJPToiAWCDa+cPDIJxflAUaxTAJtoUi2TOpGmZSW0SbEwkVmJMSankCo2VomJyeB7dvbv7P1ZIQYRRCTCTFqnxRV8SqYxjSmTdeL9bMggpgnIMS5EWKEiwt9ITOMEGNUztNPxx+02e8lIZZ/9VquDiuXglhelkn2e5ogVhRCrLRMljWTq1cFMZeEWBHvHwzLiBAzCKbXCU+AoYWRIKYk3Q+a5IOZCHBJbVEscEYWRiHTy7YtihBEFStNlpc2cqKJ3dMJIWYoRKa1Q+0XyTKpmaZfylgQ++pXv5pHP8pi5QmjTHbr2p1ws11RZIiJhJis3CuV8iRySiTEgHa2lhSq3/AAtKAO1WcVI8TalkcgW4ZYZkJs8+bo7zIhRrFMRgixCt3GqSjZMpm6bQpOiLU84UbANSHm9QkhphIBkoQrU8ory7xl5UOIFWHbmlgmXRFi4vxJgrm8DFVblGnk6TpBiJWWybJmcvWa+O/SMlnk+wfDkh/ASVSQJWVFteWZ5EalWSZNLIUmgl/SMk3ELp2Yp8wQ0wkquswyg/UT+2WbFacSD63z1ghRKaoMMUpmWVaRLtJ3y++Gsg36pboGIZWVUJ4Qqt8tpTbhTbYvjDLZJsQUlklIlkkVIabLEAPa2VosQ4ydBHmovkmGWLhaGQmxhiD2kMPn5QcR0S4JtNfTxDIpEWJZBCi2HswyaZwhVgRBrFLhhBggXLiE/dUFIebUMlnkN7xpgpguVD+N8soyb1m9m0NjQoix/YIqYuWdD0YlxFxaJim0WUmIlTWTqwzVT64irI+jihFiljZAE3EjMp1ueSaUVcL3ShHzbESspH6ZBOZTtlVacLsNsUWl97ISYmmWSVcDHijtpQTR1pY8NB0tNauQ3C9FIsSOPPJI3HjjjZg3bx6OOOIIrYJ49913O+tcX5bvt0P1u3UTm3DjHiHEmskZYjIhFjnAKBliQNwyyZRuH0BDEDtMRpm0JKmYaGQk9iQ9bD7zTPR32TLZagUPelRCLEOoPhAnxMiCGGWUyS4QYoAgWIm7mAEhxrZjPFQ/HfsmV5Hf8KoEhTwIsWazvR0ajZIQo1SvPlQV1TKZFyGW1TJJEddKQqysmVy9Zpns1ZcZGSuWIWYZFG8iPAEpGVsEUYmyPHGZlOWZCE9p07mikNKC20n5bgYiHUAT6kjZWSmh+jaCkVY8FOylWW29VJqORDFmtOv2S5EEsbPOOgv1kAQ566yz+hqpy70ihFiXKjFDrH1y0RFiXBBTnUSTHsKIGWLNcHGkUH1+kQkXmZEQa3ptUc3aMikTYrJlEoiOshnpiIIQyxiqz8RBcqh+UoZYo3iCWEW8WbQgxNg+DpSEWITKSaK8Gg0zQkxnxVTNW1bv2m66EapfEmJllVXc6jVBrFdfZmQsCiFmlYulEZ7S2jIRjNIyZU0D5VP7lGKZdGWTi2SIqeAGg7ZMsqyAFFET9OWljshpk+mVhRCj2C8pdl0F6ZjZollaJmmC2Ic//GH+/4985CN59aUsIBDE2H8LliHmNVvwWoEFkYkFkQwx6S2A8m0H1TKpyxCDKSEWfNYIkSHbDLGGD3peV9J6JglijBAD0gUxmRDziX1SddOWEGP966Zlkgkmvg9PEAj4BcKSEEsM1SfclJGrCDfoSdWpDDFx+jJDjFYuH6qKRBmYZIh10jJZEmJlldWd6lVBrCTEIkUhxOR7LVuKTJlTRRC7KHY7smWSsn4Eq2BaW5TMK0reVQt66slEwKGIN5G2HC0vqYxC5yniYcq2MgnoNybECIKf7aAI/VLGZ+Zly5bhGdn2BWDLli1YtmyZk071e3FBoVs7ZtKb5WaTj+44xSgcVYZYOI/ygmNqmZQzxEwEMeENh2gtNBUymNBkZZm0IcSmpjpHiEkZYuRQfVacEAvWs0iEWCRDzEIQm2q2v4NKK34D1LOEmIll0iQHLE1MKwmx9MqDECvCPjhTCDFVW3kRYjqqtSTEyur1KkP1k6tI5+6MRaKjZNHMktiKkDUEksyEEKNaJknrR7BVJi2TQqSZkEPUtjKPnEik92yINDK9Z9mWihDTWjkJlklq3ppRNp1uXy8JMXNBbM2aNWgobtQmJiawbt06J53q9+KWyYIRYqIgNhlmiJEsk+IBlnQhJ2aINcNQfaNRJiXhyJSkEi2TmUeZlMVkFqpfq3HxL5EQC9tMyhCzEWisCTFWBckQ8wRBLI9Qfa/V3u7ckmmR2dZusABvrJPKJFTfhBBLC9UvCbH06lVCbKZkiKmmKwmxsspyX2WofnIV6dydsUzELj4NhRyi5lQRBCOduKYTecT2KcuT51EtL+0zkvhkuK0oQhY1BF71uzxf1hEyTUcAzdqWvC8kTUcRqEwEuEi/KGKX5b7eL0WyTALAj3/8Y/7/a6+9FqOjo/z3RqOBG2+8EUuXLnXbuz4tbpnsVgeSbqQbDVSaAU00rSHEYpZJFSEm39AzoWh4GNixIyaI8ZNEqBL6hAyxCLEmWgstLZNND7wN61D9JEKsWgUGBgKrqEmGmDDKpI1AY50hxooLYr1HiLF2vFZwLMrZBT1LiKVliLkixJLsk6p5e+DG30m5tN0U4SGRFVVUEqfNYnOUp+sEIUYVqMoMsbLK6j3LZK/mP2asWIYYQfixJbZUIxnqxC6TkPuk+0ESpUOg5Kif2eSf5T3qo+l3bCTmUQi4FDtr1rZsM8soWXHkbWXZd8o26JciC2Ivf/nLAQQb7w1veEPkb7VaDfvttx8+85nPOO1cv1ahQvVTLJPiAS1fRBghFjnoVULA5s3t3/fcE3jggcRQ/bZlMpy+A9bCthgHuvhkapmsVgNKbHw83TIpEmIZLZOZCbFuZoixbVupRDLElIKYASHGREHeDjwArRgh1rcZYraUlypUX8wQSxPTyuq9h0RWVFEJSM8QcylQuSTEXIbql4RYWb1evXauy4MQ64Hj2hkhJtscM1BPvmSesrUmiu1Ts82S+pQmWsltUagu021lK+CYZKRF2rIV84giDykrjpJZJm3PtL4b2RyJNJ0zcbSPLZNkQawZnniXLl2KO++8EwsXLsytU/1eXBBrduninXSz3WyiEssQawtDiYRYmmWS2SVHRwNCDIhniLGLirxtTMLnmZhlaJlk4pcTy2QaIQYYE2JZQvXdZYh11zKZJyHGRGBfuuj1LCFmkiFmkgOmIsSoofpFEG2KUC4JsSLZbopqmcyLENOtH9vvXRJiRTzPlFWWrnpVEC
sJsUiR6CHpQT4tx0k1j9w2ZTRAXZ8o5Jc4HSVbKun3xPkIWWOUbZWat5ZxFEaTjLRIWxnFvDQboElWnEkQfpa+y98fNW+N0i+tJbQkxMwzxFavXl2KYTkXu8R1LVSfkCHGAse1lsmW4oKjegjbsiX4OW9eWxRKJMTCk3yj84SYVai+fMPCrKHs7yxDrFJpr3s3CLEXv4DWRmKGWPEskxXxGdAiQ4yNpMqOQwruTK4i39C6JMSSxDNVuyUhll69Gsxc1FD9XiHEinieKassXZWh+slVpHN3xqKIXRTLJIVUUs1rSzRRyC/q8mxso0llsq10oiBFuFN9RtmeqblYJgIVhd5zIFZqp1FkiJEGBKDQgkRCjEKu6b4bnQDXL0U6M1955ZXkBteuXYtf/epX1h0qC+3g+K51IOFBIcwQA4DJlsIyiehJQXmAqciYiYng5+BgWxRiRI8Uqj/N2iRkiMUIMRcZYq4IsUWLgp9M+GKWScCcEHMhiA3PAmCRIdZNyyQTTHw/YpnkgpVjQoxtdyeWySITYiYZYqKoJf+NfcYqq92yrHxsN0XYtnlmiM00QiyPDLEinmfKKktXvUqI9dq5O2OZCgCAveiimpci8ujEDZ1QQl2e1TZIsQEmLV81L9miabndTUQ6IIUQk8Q8SrZamp016XdqW2TxULIwUrZV2jSUnDsTGrKfQ/VJa/6FL3wBBx54ID796U/jvvvui/1969at+OlPf4rXvva1OPLII/GMPJJeWUbFLnGFGGUyJUNMFJciIfZIUPBVQhETxAYG2oIYK4kQa/JQ/c4RYkwkavigj1SZliG2eHH0cxPLpCT0OQnVD4k/6wyxRrcJsbb7O2uGWEwQC/cjaogqqYpwg55UJpZJ1e9iyaKWPK1oDysJsfTqVcrAZYYYVVxzRYi5JNLEv1FFupIQK6sXq1dHmey1c3fGsrFMkkglomBEEnAsRSVV+7aWUMo0yn45EuCSprMS/FLoPSOxy5K4U7ZFWT8H4iHFfmm8PEd972fLJClD7JZbbsGPf/xj/Pu//zve9773Yfbs2Vi0aBEGBwexefNmPPnkk1i4cCEuuOAC/OlPf8IiRr+UZVU8Q6xb97CaDDEuiHFCTJMh1lIo16o3W4zyqte5wMJLyhBrsBNlo3MZYs4sk5OTwPbtwf91gliaZdKR0CfOw+yBqdumiKH6EiHmLEOM7ePh505D9WcaIZZEcskil7yddYSYrl153vKBPiiXlEGRRNk8LZMUwYga0K/qF5VIo4766MoSWhJiZc3k6jVCzOXLjB4ixFyF6puKLkZtWQpBqs9tRR7q+tkIRtR8MmuBykCsjORwWQqfZELM0b5gai/VElsU8dfz4MHj7ST1wRV92S9FDtU/88wzceaZZ2Ljxo249dZb8eijj2J8fBwLFy7EEUccgSOOOAK+ixN9WW1CrFsd0GSIsVD96fCkpc0QUx30OstkvU4gxIKPPVNCTCCpsmSIMdullWVy8+b233bfPTq9bJmkWkGXLXMTqk8lxJIsk10O1fcEOo4TXJaCWGyUyVChZiMP9V2GmCwc6AgxWVhII8TKDDGz6tWHKpc5XC4tkyUhVlZZ3aleE8TyeJnRA9dFG3HG1panmtdVLpZTCimLAEfYVjYCXNJ0rgRN/nlLP50r4k7ZluW+QP3MqbDreRw6SewDhTwk7lf9UGRBjNXChQvx8pe/PIeulMWqTYh16eKtyRCTCTFRhJG93exgjRyEqgs5E390lskYIdaMzitWQvh81gyxRlZCjNFhs2cDQ0PR6U1C9UWh78IL0dx7M3DHe5wQYjMyVL9SgVepwGsFx457y2RQfUuIiWKiLgdMnhbQjzIp54+VhFh69epDlY3Ik5WgctWWDSGWRRBzKR6WVVZRqwzVT64irI+jyovS0YouhsvLZGHMyxKaRYCzWF7SZzYW1EwClSPBz7YtyjRJn7kWdsUQ/7yz9/qh+nfNC1xMEOvaKJNJ1o5mMx6qr8gQI4Xqi+0aEGINBG1zsaMDhBgT0ESxJ1VUsyHhqKH64vb0PDQW70Hrk6LkDLHUNpIsk9PdzRCD73OLsSvLJBcJwwPSaah+Ed5YJ5X88CwLYkmiFpBOiOlC9UtCLL1c5tAU6aGKKmKJ+wQ1Q6yIhFgnLKFUi2ZZZRWxSkIsuYqwPo7KJuTeJRVkK0pYh9y7soS6tAFmIcQc5lR12s5qlUfmUDy0FRhV8+YttPZDGRNi8+bNS9xpBgcHsWLFClxwwQX467/+aycd7MfitsBuXes0lsk2IcYsk4QMsTTLpI4QCzPERNsiAHiMEKNmiM2d29kMMd3gAaqsNJNQfQjrhfb27ltCzPcBz4PfAprIgRBrRT93Eqo/kwgx6kiRgBkhJgtiJSGWXr0WNM3KJdVFJaiKToh1YtCAssoqavXauS6Plxk9cFzbWNKc5kZZClTU4HYb21qnKSvbESWpy3NqL3VoZ80rvy5pOisRK8O2cim09kMZn5k/9KEPwfd9nHHGGfjoRz+Kj370ozjjjDPg+z7e9ra34YADDsBFF12EL33pS3n0ty9qJmSITSEUBYSLO8/5CgUa5UgabHrxwZlCiHHLZCj+6AQxL0r0tNACvvxlNA86MJjXNkNszggauy2ktWFDwhEtk6wYgZcl00rOECOPnsmqmxli7IGREWLsYybWZg3Vl0aZdGqZLPIbXp0wJWeIZSHE5FD9khBLL5cPVUXaB/MUsVzaL0tCrKyyOlO9RojlMUJwDxzXrkYpdBo6byOUJAlwFhbNIlpCqZ/ZCprK6WwJMUta0JqyIsynXJ7D/cqWdKQKu/1QxoTYrbfein/6p3/CW97ylsjnl19+Oa677jpcddVVOOyww/Bv//Zv+Nu//VtnHe2n4hlizS5d7FQPCuHPGCHmt3chdkAyckkZqh8SRVwcAqKEmIqcEtpIzRATBTqREDvySDT+sDvw6H3G1kImEjWGZ6E5vAh4aiNdOFKRcEmEmKFlUibEbEL1nRFi4X7RTcsk60ObENMINZpKJsQEgRV227u9kAK/4TWxTDab0W1bEmL5Vq/m0FBFHhvLZN60mUuLpvi3ThBwZZVV1Oo1QaxX8x8zFomgshAlyKKLLSFmK7rYikpE26GN2OU0KN4hIUYSebLYS3MSY5OmsxKxMgi7JSFmVsZ31ddeey1OO+202Oennnoqrr32WgDA6aefjkceeSR77/q0ukqIJb3tlgUxGFgmxYOQhcnv2tX+zIQQY9QZo01k4UhBrGUlqcT1ItsT0yyTKmuoqWUS2S2TzjLECiCIxTLEGo4JMaRfrMlVhBv0pDIJ1ZcJMZ0gJk+rGnUyad4euPF3Ur36UJVnEL5LQkzVL2rfqcRWSYiVVVYZqq+rIqyPo3JFNDklxFwGt+dlCXVpAySKa7Yh9y7FQ1eimW1bVJrOdp9xaf21EZvLUH2Dmj9/Pq6++urY51dffTXmz58PANi5cydGRkay965Pqz3KZDcWnnCzHf5kofpNRmoRQvUjBxgjxMbH258RMsRiofotqK2FSYSY0C/rDDETQUxnmUwi4Rghl
jbKpLReLNPKJlQ/Jv6YEmLhejArbREIMZ7x1RAeFA0IMbYdZUKMmuFAqiITYmkZYkmUF5AuiMk5SlS6rAdu/J1Urz5U5Ul15R2qPxMIMfnYK6usoldJiCVXkV5mZCwr+isDsdVpIi03kYe6PIf5Z0oBBzmKPHnbWSnrZyEKJk7nSMRS9itneq8fytgy+cEPfhAXXXQRbrrpJjz72c8GANx555346U9/ii9+8YsAgOuvvx4nnXSS2572UbFLtt+Na3fSzb1EiLESRRgu1IRroAzVd0SIkQQxmRCzDEPnYeqtBr0Nm1Em2WcTE50jxFiGWMMiQ6xSaQ960E1BrFJJsExmI8QYNefzzx0KYkW4QU8qnWWy0dATYroMMfl3WUxLyx8rq3cfqmZyqL4NIZZFELNZHvu9j292y5phVYbqJ1eRXmZkrNwythwKVJmC2y0II5fLyzKSoe/53FmTNJ3LnCpX343LYHqb74Y6Xd7fM0lIJgqt/VDGgtjf/u3f4qCDDsJll12G73//+wCAlStX4pZbbsFzn/tcAMC73vUut73ss+IjKXZj4Uk32+HDaUX6M4UQixxwjBBTCWIqQkzKEGuCWSYRiEbUDDGhX8YZYuH0IiFGthYmCWIqQkzMVzMhxBQjflIrJv6YEGK1Gu9TISyT4cecTHQVqh8im04tk/1IiMm/m9gt2fT9/kDfq8HMLjPEbNrqNCGm2+asXy4JuLRlllVW0arXCLE8zt1FvH8wLMoDuJXtMEPoPEls6DDJ0+mgeNW8eY9k6Oq7oQp+eYX4J35mkyFGFHZJFk1Lwa9fylgQA4Djjz8exx9/vOu+lBVWVy2TGQgxWRDjo0yKB5zOMmlKiE1MxPtLyBDrmmUybT1FsbDTGWINiwyxgYH8BbFWC7jjDuCAA4DQks0rwTLJytYymRaqL09nVUW4QU8qkwwxE8pL/t103maTE4l9Wy4JsSJRBv1AiLmyTNoSYuXxU9ZMql4TxHr13J2xxAdwsnBRREKMasvLmeShCGdO89YIy3NJbOVqL7X8bqwJsQwWRit7aRmqry0rQazRaOCHP/wh7rvvPgDAwQcfjDPPPBOV8mbLSbFLnNeNi12KIFaR/lyptHchTnHpCDGdZVKVrSVniIWilgdERTVWBEIsS6g+OZjflBCrVKLbxiJDzEagMSbExLebnSDE7roLOO444KyzgB/+MPo3Jpj4PuB5MQG54ooQC/e9XDLEinhDa0KIpVFeuqB8G0Ks38ul7aZIlEGeIlanM8TyDtUvCbGy+qF6TRDrVbo3Y4kP4InCBYHOp4ouroLGXY4GaJWLlcUGaCmu2QqR1Hvn3ESeDPuCSwEuz9FEXYmH/RyqbyyIPfTQQzj99NOxfv16rFy5EgDwyU9+EkuWLMH//d//Yfny5c472W81kwgxlWWSCTXKDDFLQkymz/wWgO3b4/0nZIgZWyZDiqrRbJgTYuJDfVqGmLhtLAgxm1B94wyxTlsm164Nfq5fH/9bKiEmfOCAEMtllMkiiBFypWWI6SyTeRNi/V69NvIaqzwJsSyCmA0hlneofhZCrKyyZkr12rkuD0KsB45p8YE8i83RJBfLtK1Mwe0EwYEkXNjaHC2pLtXntlZVW3rPmQ3Q4bbqtFiZxe7pcvTSfijjJ7q3v/3tWL58OdauXYu7774bd999Nx577DEsXboUb3/72/PoY98VJ8S6snB9hljcMtnWVHPNEGOWyabQj23b4v3PmRAjj1Qp5oGxSltPqmXS0XqJ8xQ2Q4yJIipBKyFDjJWvo480xb7buGXSoSBWhBv0pDKxTJpSXiUhlq16lRBzmSFmQ5tR21L1q9Oh+iUhVlY/VBmqn1z9Roi5pJ5chZETRSVXlkmXoovLAQFc5lS5+m6sxVFbAs5SgHNqZ7X8nql974cyJsRuueUW3H777Zgv5PksWLAAl156aZkr5qi4oFBAQiweqq8RxMKLdeSgVFkmKRlikh3TowhiMiHWyQyxWbOCn2Nj7c+oofqGGWJZQvXleYwEsYGB/EeZNBDEXBNijJpja+EULS7CDXpSdSpUvyTEzKtXbTdFtUxSxC7XofolIVZWWaVlUldFvn8wLBIh5tAm5yzk3qFlsuO2Q5cZWzbfDXV5OX83rgZryDOTLUtWnEu6rR/K+Mxcr9exXWFV27FjBwZkMaMsq2pniHVh4aah+kJuHCeXwjXghFiaZVJnJWQZYjxUXyDEtm4NpqkKum6OhFij1aDndTHhz8Qaapkh5oIQY5Vqu5QzxMK+5C6I6bJ7kgQxS0Isbpn0Ip/L01lVkQkxnWUyKyEmt2XyHRVxW3W6ejWYuaih+i4JMVMBLgu1pvq8CN9zWWVRq9cEsV49d2cs8fkgC8ljKyTkLfLYjFjpcnlZKCSbfmXJW7P5bmypNdV01gRcnplsLoXWDOJoP5TxE91LX/pSvOlNb8Idd9yBVquFVquF22+/HW95y1tw5pln5tHHvqsiZ4jFQvVFQgxqQizVMsmEIlWovkSIRWxsTBAT58sjQ0yYntNYaW2oBLG0UH1bQqyZgRDzMxBitRrvFxfEfMeCGBNJdIRYpRJYJnPPECsJMacZYq1W9Pc0QayI26rT1auUAVVUsrFMFoUQo4Tqu+qT6vMifM9llUWtXhXEeu3cnbEihBhVVHJI6dgGjduOnOgsQyxLThVRPHQlBuVKiFkG76s+txXg5OlcZrKRM8QsqUKnL/pneBmv+b/9279h+fLlOO644zA4OIjBwUEcf/zxWLFiBT7/+c/n0ce+qzYh1oWLt2GGmCpUnwli3N4oHqi6USZ1lsnwwG63ibZlMkkQc0yIAYJIktUymSVUP09CzDRUP+yXuF808iDEbCyTzgix4PO+HWVSR3WlEWJy6SyTJSGWXr1KGeRpc8zSVqcJsbwGDdD1q6yyili9Fqrv8mVGEdbHUUUyxHIWsVTT5R6q78jC6HIkQ6oI4oo2sw7xdyTAZRGobPLWXI5qmUU8dEVD9ksZZ4jNnTsXP/rRj/Dggw/iL3/5CwDgwAMPxIoVK5x3rl+r0IRYzDJZ4/+XRSttqL7KSqgL1Q8P2kioPiPEau0+5JkhBhgIYqaEWFKG2KxZbVEt3I5Jo0zaCDSZM8SAmBg17RPbopTOMsn+lhCqXxEJsSyCmPS5PJ1VFfkNb1qGmBwOTgknV/1dFtPS5i3itup09SohlqfNMW8aqyTEyirLffVqqH6vEG+OikSI2VjNslBPFiKWU7tnBuqp0/lnrgYNUH1uLcA5tDCa5K0pIRBpmtTlUek2R/ZL6nHTD2UsiLHaf//9sf/++7vsS1lhsUtcV8BFwwwxFSHGyCVumcxCiOkyxBghVqvx6fIgxETRiAetZxHEKKNMMgFneLgtiCUQYmQbp6IyZ4iFn+UmiFEsk74PeJ4iVN9AbBHnS8gQky8mNhbV9kIK/IY3LUPMJFRfrpIQy1a9+lCVVw6XbrqSECurrOJWr1km8yDEekDkpljNbEZ9dBrQbyk2qD7PPVTf0fJUn9vSWHlST1ky0pzaLwmko8vvxiq7LYNY2Q9FEsQu
ueQScoOf/exnrTtTVlDNEA3rCiGW9DCRGKqvGWUylPYiB2pahpjJKJMiISaIM6xcZYiJ/Z9qBoJYqhhiM8qkKBay72H2bKEj6gwxl5ZJ6wwxMQoqD0LMapRJO8sk2z/Yd83aLQmx8P+6UH0TQsyULivitup0ucyhKZIoSxWxbDLEikiI5Z1rpvq8PH7KmknVa4JYr77MyFgUq5lLC6OrEHinowEaEFudpJCsaCyXYqXl90z+bmyINMJ2d5rJ5lJcy2Cf7YciCWK/+93vSI31s7LosrqaIZb0MBE+fMih+r5gmYwJYrpQfeook1KGWMQyKWaICfY9VkkkVZYMMbL4ZDrKpBiqPz7eXo/hYaEjCYSYw1B96wwxQW/qliAWC9VvFjhUv0hihFwmGWKmlJe8HBNBrIjbqtPVazYiVnlaJl3SWKp+2RBinVi/UhArayZXrwpiJSEWqQhZQyWVLIUZ5XSuqCeHYh4luN1lThWVNlNVnmKl7ffs8ruxIfMo319Sv6zzzwjCoFKMJYqH/VAkQeymm27Kux9lCdUWxLqwcEPLpEiIyRQXzxBLs0zqRpkMrZDcjsmoM5kQY2KShhBj/cqSIab7LFJZM8SY2KQSxFwSYqCdfOU+AOisZVJ145dGiE0bjGAozicJu+1Q/T4hxNIskzpCzEQQMxXTiritOl39Qoh1ImPLlf0yLwHO5frppiurrCJWGaqfXEUQ+BxVXoRYliB1m2wpp3ZPjWDEXuw7FZUIbWUReZyOnOjyu7GxzxL20UyDQ1iSh66suP0MNllniJWVX/FQ/W4s3DRU349bJmWhRkmImY4yGTtJIJohRhhlktNlphliCotlahtZRpkUt43KMukwQyxGiKW10a1QfR0hVqlAFaqfNUOM/56HZbIIN+hJlWaZLAmx7lWvhurnlcNFnS6L+GRjYUyaRrYUq6okxMrqh+o1GjYPy2QPHNMkQowgcFgLCRSiyWHWkyubnEsCjpSLlWH9XGZ6dfy7sdivqPux021luV+VofrtKgWxAhYXxJpdeAA0zBDzq5oMMVWovukok1KoPl+WTIipBDGJWOOEWIYMMd6ttDbSCDFxZEwgLoixQQLyJsRcZYh1UxBLIsTE48eCEOO/hz+pb51IVeQb2rRRJmXrl4moJVazGZ2+JMTSq1dzaFxmiHU6VL8kxMoqy331mmUyj1D9HjimKVYzV0IJdTorUinnEH95OpfB9KTlEQW4vMVKq+XlnMMlT5f3vqeazjb/jLqt+qH6Nz2toNUSLnDyA35HyjBDrCJkiLEDKWaZFA8wk1EmKxV+IxM7aEVBbGBAbZlkJBUL1bfMEKOo+LFi66kixAYGgn6Kopgcqs9EQkKGWBZBTLaPWmWIVSrdsUyyB+KkDLGWYlpCxQkxT/15vxBiOsukqagllmmofhG3VaerVx+q8swQy9t+6ZIQKzPEyiorqF4TxEpCTFk2uVgkGsYlbeaQQsok4AjTkW2AWQgxkUjLIMDZipW2Ik+uAywQ9qu86UTqdC4zy/qh+nfNC1ottC/YXdFpDTPE/EpbQOGWyfCmg0yIiUKRLIiFpSTE0iyTjKSShCPTDDHP88xPGswymRSqD8TXlW0bANi5M/hJGGUyS6i+8Xp1OkMsAyEWEW9NRpmUtmM7Q6xPCLG0DDGdZdKEEDPNHyvitup09epDFXtQZef8TlsmqSNWqtrKi0ijEnAlIVZWL1avCmK99jIjY5GEhA4HqdsIJVlysWxC7jPlVMl9T5ACSIRYjmKlrchjnSGW4buh5K3ZiKNOhc8M300/VCmIFaxEQqwrlknjUP22qBOzTLIAfPGgZBTU5GSbLmHLkAmxajywny+rhbaQlpYhJgtHFllbxsJRmmVS/AlELZNAe5vkTYhJ28LIMilkiHndFsQ8Ly7WOiPEgp9OBbEi39CmZYjlFapfEmLp1auEmLxelIwtl4JRFkIsL7qtJMTK6ucqQ/WTqwgCn6OiEGI21i+XIfCucriyLE9eZqYROR0SaTakErktS5Gn05SVPJ1Ti2YGMs/KVtnHlskyQ6xgFSHEunGtS3oASArVrxAyxFSh+kAgEInLkwkxURCLXTCEShtl0oFwJM+TSmPpLJMqQqxa5cJOZJsQMsSyhOrH1sskVF8gxDwE+2vLy8kySSDE5NN4RBBj4gvhZrQjofpFonPkSrNMloRY96pXCTGZnuiEYOSqLarw5MoyWRJiZfVDlaH6yZV2npxBZWM1s7XSUafLK4craTqXoktelkKnWVYOM7YyZZbZCJ8ZtpWrIHzldDnvx/1QJSFWsIoQYt3oQNIDQPigG7dMximuWIaYyjIJBPQUsxECgVDk+20hLI0QYzUwQCPEWh20FqoskzpCjIlh4vYBOp4hZhuqD7S/k1wIMV2YdVKovvz8RxRrEgkxGO4D2oUU4I11UqVZJvMixNLmLeK26nS5tN0UiTJwaZl0aWGcCYQYJbNM115ZZRWxes0yWRJiynJGIVkKCbaCisvl5SlQudpWTkc7pNJtliJPrgMsZNhWXc+my0DT9UOVgljBKkKIFdAyGQvVr8Ytk/Loh54sorCHnl272iIR+xvQJqfSMsTE+QwyxDoiHImWSXbTkkSIVavtGxw2HytKhphDoS+1jYQMMSBnQUxHiFUqMdum2J9YWylVEmIay2QaIZanIFbEbdXp6jUbEas8M8TyziPrNiGWdTuUVVYRq9cEsZIQU5aV1cwRyZO0TJtwd5diHiVDzKXtkNJWFotmrhZGhwJcp8VKkojlUlzLsO/1Q3V1zb/whS/gsMMOw5w5czBnzhwcd9xxuOaaa7rZpa4XE22ALn05STfbiaH6BMukfHJgFJQ4muLAQPtGQRSKpLZZebIgljLKpLhdO2ItZMJWs9leR3HwAKAtjAnr2XFCzFGGGNBFQYxKiBHFmjJDLCVDTLwJz0KIyfOWGWLplQchVoSHKpcZYi4tkyUhVlZZM7+KJIiVhFikXI1kaCskpPWJvLycqSd5Our6ZbIU2tB7FEuoSwtjzpllNtl0nRYrk9pyKST3Q3VVENt7771x6aWX4q677sJvf/tbnHLKKTjrrLPw5z//uZvd6mpFLJPduNYl3WwnZYhVa/z/7OCLWSblE5Y40qRMTQFKQUxrmSQQYhGh0UGGGNkyCbRtk0mjTAokXEwQIxBiTrPRLDPEgJwEMdmuJ1ZOlsmYSMgFMcNtpasiiRFyyW+ydZbJLBliJSFmXr0aqi/SnuLvSdPppnEZqt9LhFh5/JQ1U0rcx3uFhs3DMtkDxzSJELMQlZJGTrQS4ByJWEnT2QhULgm4TAKcRVA8VeRRinkEkcc6hyvvbUUQbV2OyGllq+xjy2RXQ/Vf9rKXRX7/+Mc/ji984Qu4/fbbcfDBB3epV92trofqG2aIVSptQYxbJsMLP/sZOwgZPbVrV5wKA5TkVCbLJFp8hEmgQxlitVrQl2YzEMRGR5MzxJIIMd+PWigTCLEso2caW0EplskKsS1KyXY9cfnsbwmh+rJ4m5UQc2qZLMINelKJN+6Nhj5UPyshRiF+5H71c/V
qqH6elsmk6eRzi65fSb+r5i0qIVYeP2XNlHItiBWBqMrDMtkDxzQpVN9hLla3hQuSyONSoMpiKbQQD7Osn40NUCeu8dieDMJnntvKpdCq7JONrbKPCbHCjDLZaDTwve99Dzt37sRxxx2nnGZiYgITQubUtm3bOtW9jlWEEOvGg0pKhliMwqnGBTFtqD4QtUyyC7uKEBMzxGJvUBCdXheq32rxnC2xnyZlZS0cGgJ27gxGmpyebt+8qDLEWImCWK0WFQo7QYiliYUEQiypbauSBRNxW3U6VN+lZbJIYoRcojjRaMSFg7wIsbR5i7itOl29GsxMtUzOZELMlSBWEmJl9VP1ivjfq3b3jGVjNcuS9URZno3VjGKlo7blVHTpNPWUYf2s7JeEfmWxl+ZJ0xXCzkoUK/uhumqZBIA//vGPGB4eRr1ex1ve8hb84Ac/wEEHHaSc9pOf/CRGR0f5vyVLlnS4t/lXhBDrSgf0GWLxUH2NIMYyxHSWSTFDjJXKMplGiKkyxAThyHmGGIUyE0eaFAcP0BFiIhEmkm9AMiGWIVQ/U4YYE8RC4TIXQUwkjmTBRLzB9LwYUcl/Z8JqSYjRSqZ1dBRXJ0P1i7itOl29GswsPyz2YoaYK8tkSYiV1evVi4RYr9rdM5bV6HyOyBqnI/g5tExmyakihc5bLK8j+W42FkZCvzINeNBhO6sVLZiFrCTuC/1QXV/zlStX4ve//z3uuOMOXHTRRXjDG96Ae++9Vznt+973PmzdupX/W7t2bYd7m3+JhFiMcOlEpWSI6UL12YHFxKdEXFW0TOaZISYSYs1shBj1YhspcaRJURBj60chxFSCWJ6EmEmGWEKoflLbVqWzNWkIMY9tJc9rb9/MhJhDQazIb3hlcUIUqmTRyjQYP8u8RdxWna5eJ8RcZIi5FMQo4pNLQszGxlkSYmX1WvWiIJaH3b0I5+6MlZvoQqGsHFr38g6Kl6fLQiG5FJVcWjRdEk0kK66F0EoR11zSgqQsPKoYm4He64fqumVyYGAAK1asAAAcddRRuPPOO/H5z38el19+eWzaer2OuigQ9GCJJJPX7MLFLsUyGQvVr7XJLp4hFgo1qaH6u3a1/29IiMVGmUzJEIsQYjYklTAPWQhhgtjYWJuE8/32eqWF6idZJiVCjK1bUTLEyG1RKo1OYsuXMsSCZTeCvzkixPpulEmKIFYSYp2tXs2hcZkh5tIySRGf8iLE2HTyd10SYmX1epWh+voqEt2bsUgCVV6iUgeEGVdB8fJ0naCQbEQXV7bDpOlsBKpM9F6O2yoLTUcixAj0V2mZbFfXCTG5ms1mJCes36oXQvVjlskkQixtlElNhlikHwMDwGGHBdMffnh7HocZYhT1P1Yqy6S4nmmh+kRCjNFvPZkhRrFMVioxQowvWxQgi2SZLDIhprNMphFipqH6JSFmVr2aQ+MyQ2wmE2Ly56pjoiTEyur1KgkxfRVhfRxVXlazTLZDC+semQpyJYJ0ONOLTFk5soQmlUsLo5XQmmVb5UTvuRQYS0KsS/W+970PL3nJS7DPPvtg+/bt+Na3voWbb74Z1157bTe71dWKhOp3pwPR31MyxCih+rGDXiTEVBliqlEm5ZOg7wMIO1OrAUccAWzcGIzmKM0jE2JZhSMyYUYV/jJmiGWxTDrJECsIIZYoiKmEHU3FqLk8LJNFeGOdVKLQCJSEWJGqxXfG7G0V6aFKFvoolsKk49mGEKO2RQnVz1vMKwmxsnq98hLEeiVUv0cJMapQ0nEhQUWIEYUZG9tapzO9SMvrgC3PVe6XbVtZsuJcbSubETkz2VmJ69cP1VVBbMOGDXj961+PJ554AqOjozjssMNw7bXX4gUveEE3u9XVihBi3bjYmWaICZZJLtQwy2RaqL5JhpisrNcGAOwKfmHCzNy5kWlUGWIePKsD3ooQEy2TtoQYYZTJLKH6xtRTkTLE2N+YZVJYfsUpIaYWdnueEGM33SaEWJkhlm/16kOVS8tkpwkx6vJsLJMUAS5r1lhZZRWtepEQ69X8x4xFGg3Q4YO9TbZUFqsZiaCyyNgqir2UFNzuMvfLxl7agaw4VzSdy+W5JCv7oboqiH3lK1/p5uILWRFCrEiWSYUg5jcRsTUmEWJayyRxlMmYGKESxKRSEWI2OVvyfM4sk2mh+gMD+RNikoiWun06nSFGHWVSJsSYG7xSMSbEYvsadzo4vHAUmRCTxYm076AkxDpXvfpQVVRBLC9CjE2Xlg+WhRCjTldWWUWrXhTE8rBM9oDI7SoXq9NWM7aMxOechHmzCFQUmi6vzDIyseXKdpjxu8krK45CY2XZVmR6z0bw0wh1aftxP1T/SoEFLZEQSx1l8rvfBc47LxBcnHUg4QEgfBAWQ/UrLWgFMT7KpC0hpskQ81QCmlQ85F/IELMVMSKWSaqoprJMqqyhulB9gwwxG7EvEyGWJoi5OLFah+p77b85yhDrm1EmTTLEms3oOcOEEGs29W2rpu/36tVQfZl8S+pTXpbJLAH9NoRYlrZKQqysXq9eFMTyCNUvwrk7Y7nKxbIhtrJSSLnZ8nKmkPLMLHNmO8xA78XaotJ0FKE1y7ayyRBzKfhl6Hs/VCmIFayMRpn89KeBb30LuPVWhx2gWyYrTShzvlJD9ZnoY0BOqS2TYSURYoKFkxNiFrZCwIFlkpFwJoSYLIiF65NnhpjTUP1OCmKepybELDLEEkP1fYeCWJFvaGVxQpchZiKAyWUayF/EbdXpyoMQK4JQIouwLm2OebdlS4hR6K+SECurH6sXR5ksQ/WVRclCsiHEKG1lCcKXP3dqy8s5p4pKbLnKLLP5brKsX6wtqnhIILby3lZkes9C8CPRZn1MiHXVMllWvIxC9ZmYxMQWF5VimRRD9f0EQoytA0cw5YOVCUVJofoqQUx+KzAgCEVplkkhQ8zaMunlYJlUZYjJofq1GnDyycC2bcD8+QDihFgWsc+YEBMfxNMyxIx7oyhdhpg2VD8HQoz4ZopURRIj5DLJEDOxSMplSpcVcVt1unr1oYoq9Lmiuly25TLkviTEyiqrDNVPqyLlP2YsV7lRZOGCIGLlKhg5suW5zPTKIlCRvhub3K8M6yd/noXec0qIEfbRPAcNyEI69kOVgljBSrRMpj6osIfRLJSGXAYZYmmWyVRCLMkyqRplUj4JWhJiLiyTxoSYbYZYrRbcyP3858G+kJAhlsUOKs+TKhiqCLFwH4gJYi6esykZYpUK5FD9LIRYbORN/qxuOCKnrorwxjqpdBliLgUx+fsoCbH0ysN2U4SHKnmfY5/pMraKSoi5HEHSJSFWhO+5rLIoVVom9VWE9XFUNtRT3lazXAUjioWRQj25JMQcikpFWD9qW1YDAmQQ82y2VSZx1MaqWlomyypKcRGphfQbWPYAmeWhNN6B6O+aDLEkQkwO1Y8d0NRQfU2GWIQQS8gQixBiGUZiBKLrQG6DEWJZRpkEgpsfP/ki7jJU32mGmIt7NYMMsfwIMTXpaLsvhY0FP4v4kCoKjYD+O8hy7jG1XxZxW3W6epUQk/c5QN
0vSoaYjWDUCULMlSBmS4gV4XsuqyxK9aIglkf+Yw9cE1092Lu0mnU65N7GMkm1e3Y6s4wU4p/z+snTZaH3nFpxbTLEOi0e9rFlshTEClbcZthCMQgxkwwxgcgKZk2wTFJD9XUZYhTL5EwhxHSh+oqSt7PLUH2nGWItBydWnWWS/S3MEBOXpiTEbAUxfh9raC/VLmQGEGKqN9kuM8RMabMibqtOVx62myJsV9U+lyYYZbUKdpoQc2WZLAmxsnq9elEQKwkxZVmFn+dsNcs15D6DpdBVkHqnM8tibeW8frG2MuS7uaSsbOyzTgeHyJCF1w9VCmIFKyYi+RRBjD2MFtUymTSMqyiIWWaIeZ3OEPMtMsRUofop60kRxFjlEapvlCHWCUKMYplULL/iCYIY275ZQ/VdCmJFJsRU9jVWeVomS0Isvdh30yu5OqxUD4tpglEnLJMlIVZWWZ2vMlRfXz1KiLm05WWxmuUqGGUInSeF+FtkepEsoRlFF9PMqyzrJ0/nlN5zKY5SxEqHhFgWmq4fqhTEClZM3PAAOiHWCcukQag+Wwe+Lkmh+llGmRwwyxDLkrMlz+csVP+oo4Lfjz22/Zkcqq8oOVQ/ix00U4ZYWqh+Jy2TkHBr9n+XofrEmwhSFfkNr4kg5pIQS2uriNuq0+WSECvSPki1THY6VL8kxMoqq/NVhurrq0jn7oxFIl0IlFWu2VJZRBCKQEWkkHIjxAh9z0yIGYp5TgmxLN+NQ8qKRIhRiTSC4GdDOpah+mUVpiKWSWqGWCcIsXAZWstkeLAZheqryCkmCgmCUOztT10gqQgZYllGYgSiJwkyZZYm/B11FLBlS5QKM7FMuiDEeiVDDFF1n6+HRah+UoaYb7qttAsp8Bte3Y27LGJl6b+pkF/EbdXp6rdQfblsMsRmEiFGoSZLQqysXq/SMqmvIgh8jspGSHCVU0UVXTIRWwRyzWX+GcmiaSHAkUUXR2JelvWTP3dJ77kURylEWhZ6zyYLr58tk6UgVrBqi0igWyZdEmIpGWKUUH22DlzcM80QO+cc4OabgQsuiLXNimSZFAmxZjZCTBSOrCyTqvUEogKY/DuREONiX+EyxIy7Ey9x3056oOTL9wA2kEMGQiwmEnKnQ58RYhRBLKl8ny7oU9sq4rbqdPVqqL7LDDGXofqUtmxHmewUIVapBPcKPfDwXFafVC8KYnlYJotw7s5YrrKQnBJiDgUjp5leHbYBOhUPDcU8pyOAZiAPbUL8M1l/XYqjDknHfqhSECtYFS5UP0uGGCdrpJND2iiT++8PXHNNZBarUH0VIWaZIZaLZVJVFoRYFrGv8BliFEIs3AfFPSQiiGUmxIKfuYTqF/Eh1cQymVSVirt1Y20VcVt1uvIgxIrwUEUlxPIK1U86N4iZba0WzTKZtD0p/TKxaKYJxeI2bTSK8T2XVRalelkQKy2TkSqk1cxGlHBpmXRp0aSILlmC6XMaEMDlCKBOCbEs24rwPbu0hLrMwuuH6l+zaEErQoh1wzJpmiEmWCatQvWJQlHsRFI3JMS6kSGWZplUVQZCzGbdjIPiC5wh5gv7iM/66TBDzPcdCmJFvqFV5Tmxop5r5HlVbSWV/MDA5i3itup05UGIFUFopGaIddoyKffLlupSzWsriMl9ova9CN9zWWVRqhdD9XvV7p6xKGSNTRh5J6xmrgQcl5QVRTy0soQ6tOVRlteJXCwKbWYT4p9lW5EtoY7suvLn/UyIlYJYwYqLGyaEWJ6WybQMMeEBRhSgAEHcSwrVT8oQU1TsxFU3EI5cZ4hR20gbZVI3D0AnxDKE6svzGIXqd9syyR6IlaH6fvtvWQmxJtuP+4wQy2KZzCKIJc1bxG3V6cojh6YIQmNRLZMyuaYjtnTbk/qZKSGW1I74eSkolzXTqpdD9XuFeHNUFCHBKvw8i9XMRpQg2vKy5J+5GsnQqahE3VaGYp5T6onalitiKwNNZ7M8skWTsB/3c6h+/655QcvIMske8PO0TEoZYiaWyfaImQmEmAE5ZRWq7zJDzI+vZ2rlZZnsJiFWRMukghCrsFNbpeKOEHMpiBX5htaVZVL3u828RdxWna48cmiKIDSqBLFOWiZdEGK67Wn7WUmIldWP5fpcX4TrbUmIKSu38HOHogtFcMgiSrikglyKPDbfjatML6qoRFpehqw4K8Evw35FXT9X2XvyMkrLZFmFKbJlstXqjCAmWyblUH0/fkGIWSZ1ofqWhBgpVF9FiHUyQywvy6REiGVZN3keq1D98IErLog5uPm0HWWSXSBEQsxaEOszQkxnmSwJse5Wr+bQUC2TRSbEdKJZHoRYmlBM6XtZZRWxZOoyaxXhXFcSYspyRllZiErkDLEMgpHVgABZCDGHNkBXmWXy504JMZcWRoJ4mEUctbH+dnxEztIyWVZRikyIieKAS8tkSoZYjBBTnHTkUP3YASZaJm0JsUEDa6HjDDGy8EQZZVIuC0IsC/1mnSFWrbb/XxBCLGqZFPrGCDFry2TwsxxlEmWGWLfL5YNiEXJ1WFFD9SkZYnkQYrptRSGxukGIlZbJsmZq9bIgVhJikcorF8tp7lcWwYggStjYPbOILuSREx0RafJ0LnOxslgKXQ5A0M3BITqRhdcPVQpiBasIIaa7eIsiWCcsk+EyoqH66pMqW4dUQmx6Gti5M/h/miDW5QwxcT5jy+TUVCCKAemCmEWGWJZ1s84QE/vWrQwxaqi+ISEWG3mT60OGI3Lqqsg3tDrLJLVKQiyfyoMQK8J27UaGmHjdzJsQo4xOqZouCyFWWibLmqnlWhArgvjfq/mPGcsVseXUamYjSnQgVN+VmOeUSKNuK0PhMzMh5sjumZfgl9QvK3ovo72UIq71Q5WCWMGKiUh+C/obWMqbcptKsUxGCDFp1phlMrxYxw5CkYLati34mWaZlE+6lhliXbFMAsDWrcHPNEFMXBcqIZaBfrPOEKMIYi6evyiEWPjAJ+4hbgkxtbBruy+FjQU/i3hD60IQS6K8KFVmiCVXrz5UUQkxm9yvTmWIUUSzpOUnfVYSYmX1Y+VFiPVaqH4PiNy5hZ9nEBJsRJcsYoNLKsil6EJZP5vtnin3K88BCLIQW44EOGpbro4beboyVL+swhQPok+zTIq0Swctk2KGWCWBEItliMkHoSiIEYWi2InExFooEGIuLJPkNsQ+btkS/EwbZdLz2vMZEmI262adIdYpQswyVN9thhhrqk8IMRcUUkmI5VO9GqrvMkOMmp3VyQwxl5bJkhArq9erFy2TeYTq94DI7coml6eolCUo3krkcUg9dXrkRFfCIFWAc2r3dJXplUGAk/uRd2aZPF1pmSyrMFUYyySjajSEmJ9wsMVGmZQPMN9vC2BMEDMM1bfNELO1TEYyxKht+H5b3GKCWBohBqQLYjIhloF+s84QIwhinoubNd1+zn7PO0Msj1D9ItygJ5WLG3edIJbWbkmIJVevE2Jpo0xSyGj5GuYyVN8lIUYRzlTrWBJiZfV69aIglgchBsz449pqtLwshBgcCgmGIo/TrCei6EIZgCCLi
JVXdltWUckqQ0xFiFkIfmSaLst2cJjvVlomgyoFsYIVOVS/U4KYlCEWDdVXnyjYOiSG6gPAqlXR9g1D9T3LDDFbEUMUm4zaYLbJzZuDny4EMZeEmGdIPbETpyhgKgQxvwk3RIIu50dDiFXYqc0FIZZHqH6R6By5KJZJW1ErrV3dvEXcVp2uPAixIjxQqR6AbTPEXBJiJmIXlRCTXzYlTZdleeK85fFT1kyrXhbEXBJiQDHO3xnKRkhQijwdDj+Xp8tEWVkIVFlEl6KOZGhFwLm0FFK2lUOaztV2yDoCKKWtfqhSECtYRQgxaoaYS8tk0k00s0yKofoJJ/GYZVJ1EJ5wQvR3U0JsaFa7nwkP2F3PEAPagpgJIcbmIRJiWUL15RNhKi5LzBBLzcCjlgvLpCNCzDcVD3VVhBv0pFLZ1+SyFbWyzFvEbdXpyoMQK4JQIh7LumPDVe4XtS1XhJj4mYm1sswQK6sfqwzV11evEmIZLJPydFlsjmRxjbA8KxErZ/ulzfplJcRMhcgstGCsrQxCpEvBz2rwBJfiIaGtMkOsrMJU1wmxpLfrqlD9lvpEIYfqKw9CWRAzJcQW7ga87W3Ahz6UeNMkEmJZgufl+Yza2Hff4Cf7jnIgxLKsmygQksRComXSmSCm288lQSxy46ESxKijTMrCV78SYrobd1NRS2yrJMTsKw/bTREeqMQHYF2/TCyTRSLExM90bSWd40yXJ85bHj9lzbQqQ/X1lTYa7wwqK1teBuuXy/BzY+HCoQDnlHpyaDvMlHllQe85tXsqpnNKiFGFXVfiIZE8pPS9H6ra7Q6UFS0+MmO3QvVTMsQiofoJBxtbBz5ipuogzEqI+RXgsstI87TQykRRyfMZUWYnnwz8+tft39NC9QHjDDG+bhkzxEgCTycFsVZL/9CqI8RYX0TLZFZCzGWofpHECLkolsmSEOtO5WG7KcIDFVUQ63aovktCLM9RJlUCXHn8lDVTqhctkyUhpiwSIWZg/UrMLhamUf0/qU+q31X9cCqU5GxhzEuYSVqe/LnTYHqXFkbFdGSbo0Nh13QfzZpZVmaIBVUSYgUrfjIH9A8qeWeIyTfRLENMZUtjv8uWSR5GrjjA9t67TU8BxoQYRYyIEGLNLhFiJ58c/T0PQizDuolCn5EgJgp74b7iXBBLC5iWCTHVvmlBiCVniNHeTNEWUiAxQq68BbFqynuYkhBLLpcPikV4SGSlskzaZojZhOpTaTMd/WVKiNlaJl0SaWWVVcTqRUGsJMSU5ZQQM6SxOhJy73B5rtrKi4BLWl6sLQoB1wEboA0h5oqAU/2u6kfe9llqW/1QpSBWsCJbJinWEZtKyxDz2w+zSYRYLEMsSXE+/vj2/w0JMcpBqyTEOp0hdvzxUQGAIoi99KXA4sXAc56j/HMSIWYj0IjzkOi5TmaIyQJWkiDGBLkkQSxrqH6CsNuzhBiFQrIVtVR/o85bxG3V6cqDECvCdhUfgHX96jYhRhGoSkKsrLKyVS8LYiUhFilSDpdD61de4ecuhZK828or3F3blqGYl1VUciXm2eTXObXi5iyOyvOXhFhZhalIqH43LJNsmQmWSV94EPahPtiYQJOGLkdsk3kTYg4zxIxsl7NnA89+dvt3iiD2/vcD69cD++2n/HNShljWUP3CWSZlASzpd2WovtC3rKH6/D7WoWVyJhBiLjPEylEm3VSvhurbZIi1WjQLY5ZQfUpb8oNuGrFFFfyS+mU6qmV5/JQ106oM1deXuF1m+HFNseXZiCAUcc0pIdZh6qnTbTkVqDKIWFYCXAeWZyXAORIPXVpxy1D9sgpTEUKsm5bJJEGs0hZCEjPEwnXQhuoDbUHM8xLtgaxs6Jy8MsSMTxgnn9z+P0UQA7Q3gi4JsUKH6qcFTOtC9dmAD+IopNaWSUaIObRMFuGNdVKVGWLFrX4P1Xc5CmM3CDHf14vheRJiM/zBuaw+ql4mxFxbJotw/s5QVgJV3hQS8QW8sXCR0XZoOmhA5rYsKKQsmVckcdSGFswgmpFtjjZ5a66E1qzfDaHv/VClIFawsiLEOpghVqmIlkn1wRazTCYdYIccAlx8cTBSZMpbM+oFUTVNC13MEAOA5z+//X+qIKYpkRBrCftIR0L1i2iZVBBifN+0IMQ8z4tevPmLXYeCWJEJMfkcoKqSEOtOuaQMirQP2mSIJU3j0jJJob9MKDIXgp8tITbDH5zL6qPq5VEmS8tkpGxEEFd2yE5kL7kMpjelrKhtkWg6hxSSq2mo/coimjklxLpsxc1CyvVDlaNMFqz4yIwmGWJ5WCYTMsT8alsI8RMO7lioftIB5nnAv/4rqVtWlknBwtm1DDEAeO5zA+vk5CQwZ47V8sWKCH2t9n6QNVSfRM+x71zMfOuWZVIbqi/0zZAQA4JtyW22/UqI5WWZtM0fK+K26mSJ69/LhFhWS6FLy6QrQkw8pkpCrKyy9NWLhJjLdeqlUH2XhJhhW50QXUzJKGpbZNHFkZXT6YAAGfpkJVZS9peMRJpNiH/edJtL+2w/VCmIFax47lZBLZMUQowJYVzcc+BJtrJMMuHIQYaYsbVQrFmzgBtvBHbuBEZGrJYvlrgt2P4CdChDjAl6c+cKjRRDEBMFWm6ZtCDEgrYEQYzfx/YJIVZUy2QRt1UnS5VDlaWKkKvDyiZDTPW7OJ9LQoxCY1FzxlwTcCUhVlavVS8KYiUhpqxI1EUHcqqKSD25JOCcii4OBaqOi5WGghFVxHK172mnc5R/5tI+2w9VCmIFq8JYJuUh61WEWIJyHiPEHNzUUA9s1TQuMsQyhw4mjBhpU+K2YFZQwJIQMxX6zj4b+OxngbPOan9WlAwxx4QY/3+CZdJ2XwJQjBv0pCqqIFbEbdXJEvd/l4RYEYRGl4JR0jVMLPmzThFipgSc6tpOEeBKQqysmVy9KIjlYXcHZvxx7XJ0Poq41nHqySXJQxEPiWJeXoIRabsTtlXWfDdjyiqrzdEiby1vus2lfbYfqhTEClaRUP1uWCaT7Bg8Q0wI1U84EcZGmXSgOGcmxLqZIea4xG0x3Wx/9zb9Ml6v2bOBd75TaiQnQYyaIRbuq+Kop1ysFQmxjIJY3xBilDfZJSHW+RLX3yVlUAShsdOh+pR2VG25IMSyWiZNQvzT+lVWWUWsXhxlMo8BUYAZf1xbBcVnERIsiDRX4lpWm2Neds9MgpEFhZRFNHNpA7QSlRzl11H75Urw07VVWiaD6l8psKAVIcS6YZk0yBCTLZNiZhdACNU3qCwZYhFCrBsZYo4rQogJGWI262acIaaqblkm2e+KUH2lZTIzIVZJnMa4iiRGyEV5ky0KF6rSCWJpYk5JiKnLdYZYkURZlxlippbCpHZUbWUVqPIQ/EpCrKxeqzJUX1/idpnhx7WVLc9VLpZD0cVltlQnLIym9JfTAQEy2CptxENXtlHqdFlHfTTd7k7tnn1smSwFsYIVmRDrUoZYNFRffXCzdUgN1Tco6kleNY2YIWYr+jgRjhxVVwkxZSMFsUyKb6DEvrERMaemyItWE2LmomzyAgokRshF
sUyKpIuq0ggxGzGtiNuqk1USYrQMsSISYi5D9SmCn0oQK8L3XFZZlCotk/rqM0Ks46KLBfXkMlvKqYCTxe7pUKDquFhpaId0mcnm1M6aoe8u7bP9UKUgVrBiJFPqKJOiIMb+f+edwN13Z+xAws09C9WvtkcXTArVLzIh1hOWSZEQEzLEbIS6TIMF8BlDy2K3LJMKQqwiEmIWgpi4LXKxTBbhBj2p5HOAqrIQYrbzFnFbdbJ6OVQ/z9D5LIKYDSFGDdWnCH4UQkzVljif/HKrrLKKXr0oiJWWSWWZUlZA/gKVUwHOJqfK1fppwAFju2dRthVxX3BFWeX13VCn64R9tiTEgiozxApW5FB98ca50QB27QJOPjl4+N+4sX0TbFopGWK+0K4vHVxJofouBCTqSSkyjUiINbMRYoUSxIR1Fy2TPU+IpQhiiYRYES2TM4EQ04kuWQgxNm/SupeEmLp6OVTfpWXShOrStWPalmmovivBj02XJJCVgnJZM616URBzSYixdprNYpy/M1RelI7LDDFXI1ZmJnlE8TCBZ6HSPqb2y6wh93mJlaRcrAy0IPsbBzwcCX5Z+5XXCKDdfr7tZpWCWMEqYpmkZohNTwM7dgBjY8HvExP2ghi7aCdYJiOEmCQuiQIUgNQTiElRT4KR/giEGBOObA/2CDHUbUFM2J6iZdKGxBO/w5kuiEVC9TMSYrmH6hfhBj2pqJbJThBiaRa6firXhFiRtmselkkdGUUZzVHsQx6EWNZRNKmEWJHF97LKUlUZqp9eRTp/ZygrW16GB3sb22EWm1xuofoOCTFXgwZk7ZdLUclVRhr7G4dVHFlCqf3q+HdTWibLKkqRCTE5Q0z83eDBP1YGGWJplkk+ymS3LJOCQOcyVL9IGWIuybfCherLRFdKhphILPK+OBTEfJ92MaMtoMAPqVTLZFZCjDJvGlHTT+WaECvSdqVaCjsdqm+SD2ZKiLlaP9V0KvG0CN9zWWVRqhdD9fMgxIAZf1x32mpmNbKgIzuk7p7R1L6XhRxy2ZZLsbLT9lnKNpD/5iq/jtyvTtN7fWyZLAWxgpV1qL74sO9CEEvKEKu1CbEktVu2THYtVF8kxJrZCLFCWSYVhFhXyTeFIFbpJCEW7qtezoJY3xFiaZZJW0LMRBBLE0j6qfqNEHNFULnMELMN1RcFv6yWSRNCLE2AK6usIlavWSZdn7uB7q+To7IKuc8gUHWV2KJaGCl9d5kh1oGcqiLmu1G/G9PAfArVpetXHvtC1rb6oUpBrGDFc7fSBAVRHJiedkeIJb3tZhliNdEyqT4RFiZUX0WI9WiGmBPyzTZUv1uWSfa7yjLJ/lOptAUxmTj73/8FLrggyN+TSimI9QshRhHEshBippbJIm+rTlZehFgRHqjyIKhMLJMuQ/V1y0uzhLomxNIsmgCwbRtwxRXA5s3qv5dVVqer1wQx1+duoGeuiy4zvUiEmA2RlkG4IOdUUaggQ3up7h7VVVtOR8g0JO6o/aJsT+22ciTmOd2PDb8/alvdfr7tZvXvmhe0uM0QKLxlMokQY0JYnoRYxzPEBCHNWjhyVKpRJl2sV+EyxAwtk1FCzGv/je3L8nHxiU8AX/sacNttsUVHtgt7tpQz8/7/9s48XI6i7v5n5t7ce7OHQEII+xJ2CDuGHQmbsqOsKiKLCwiIgvC6IMpPcBcRldctiAuICAi+oMiSGMISQsK+EwEhYQtJyL7c+f1Rt6ara6qqq7trZvrOnM/z3GfuzPRUV/d093SfPt9TeU5sm32C7iJ0hpgugCU5xNT3dIfYT38K3Hqr/bOtTL0cYkW4oKpHhpjLseUzmqP6uo9I51NWWUSH2C9+AZx6KnDlleb3CWk0rSyI0SEWI2gJY0pnTd6yvJAOsbSOpry5WKH6num7CTk/j+nyiHS+06V1J4ZsqxFuunaAgljBCFIymWI0vRoSBDFXqH5NyWQdHWJpR5kMmSHWbAVdXXZZMtmSGWKpQ/Wj9dLhE6ovnWEZHGK5t4Ei390NnSGmT5tWTJOf/e9/gc9/Hjj9dPtnW5l6jTJZhAuqkBli9SyZzOsQq0dGmmm6NA6xd94Rj+++a36fkEbTaqH66nxbKRctAGmzrPTPqKQVu3yFCx/nWp5pavqVJ1sqgyMtz7rK5HoK5Ljz7ZfXNL7lrIFcXfpnbK+HypNz9stzPbQ6FMQKRixUP80ok6EdYpYMsVjJZDlBEJPlnwEEpNwOsRbNEMvrfAuyXDZBLO/JZ0pBrGQQsZyCmNxnDAKyOVQ/4EijRRIjdHwzxLLkgAHpxDT1swsWiMdFi8yfmzcPuOkmMcpuK9LKofohM8QaHaqfxiGWVALsM/plaIeYPP7lOW8gJCStFqpfD4dYs0W+QKQWG3KWHWYqbcvhrPEVqOoh5oV0pIUcgCCPuy3kAAtBHWIZhNZQAz/4Zpbl2a7aAQpiBcPbIaaeKIcM1U/IEOvo6q5OajsQSiGsuiwhSiY9lXVbf/JmiAUJnw+EySGWtU+lUqm6nvpdhpjDIVZdGy5BTD73FMTU9Z57pNEiiRE69SiZDOEQW7FCPNocsJdeCnzkI8D119vb7s+EHqWsSKJsyAyxZjnEfISuJMdWMxxijuMgIU2h1UomGapvJa3LKq/Ik6m0LZBwkXfUx9QZaS7xMK0jLaB7L9f8PMXKtG4z3wyxUK4uV79SC3AhhV06xEhRiDnECpkhZg/V1x1i1Ty0ECWTnju26TMhMsSClBYGwpQhlifXTH42s9DXd2HW7Awxk4gVyxDT20vpECt5/OB5U+ST2XqUTIZwiKmCmGm9zZ0rHt96y952f0Z1/YSgSA4D35LCNBlicr/3EdYqlfwjSPpMUw/Bz9RWFocYBTFSFFpNEGOovpXUTqWcIk+mDLE8wkVIgSqlC8l1nlqPzDLXdKH67ju/uuW75Sg79BZa0zod8wq7nmW9rU77LnlBka6qREFBL5kM5RBLKJmMOcQ8SyYL5RBr0QyxPH2Sn+3vGWLxUH2lb0UsmSzyyaxvyaSvy0svr0xTbmlyiAFmIaTVL+zr5RArwjboG6qfxUHl48SyzS9UhljIUP20DrGkY02r7zcAMHt265ZStyKhhasiCWJ0iMVILQTldYhlcdbkEBLqJVDlXVepM8Qa4EJKuy349itPDldNvwK5ulz9Si20+q6rnIJzq0NBrGBUXVVpQ/VDOcRMJ9uVijlDzDLqXj1C9X3vCpj60w4ZYnlca/KzwTPE6imIqY6Ojtr+ewliaUsmQ4bqF/lk1qdkMo9DLK2YpjvEAPPFe6tf2Id2iBVpGwyZIZalZNL2Wpo8Mp9p6jFogKktdVtJ+p5D7jd/+Qtwwgn2nL9m8NhjwCabAJ/6VLN7QnxhqH4yRb6ploLU4ecu4SJtTpWnsyaPCymLQBVKgAvppgvpQsojzPhmloUSGH2nq9doqSG347zbaKtDQaxgeJdMujLEQo4yKV8zCGK2Hw0phAV1iHnesYlNY3KIZc0QKwV0B+VEXfa8Qp/62cJliOnbsbrNG+64xn5EcjrEYplx0mwRchto9gm6i9A
[... base64-encoded PNG output of the deleted notebook cell truncated ...]AAElFTkSuQmCC", - "text/plain": [ - "<Figure size 1500x800 with 1 Axes>"
" - ] - }, - "metadata": {}, - "output_type": "display_data" - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "NPU Utilication: 71.33%, NPU Free Utilization: 28.67%.\n", - "Device synchronize 5 times, try to reduce synchronization statements to alleviate the bottleneck of operator delivery.\n", - "There are too many small operators, you can increase the batch size appropriately.\n" - ] - } - ], - "source": [ - "dataset = interface.get_data('timeline', 'op_schedule')\n", - "data = dataset.get(\"data\")\n", - "import math\n", - "op_dur = [math.log(i + 1) for i in data[0]]\n", - "op_free = [math.log(i + 1) for i in data[1]]\n", - "x = [i for i in range(len(op_dur))]\n", - "fig = plt.figure(figsize=(15, 8))\n", - "plt.plot(x, op_dur, c='r', ls='-', label='op duration')\n", - "plt.plot(x, op_free, c='g', ls='-', label='op wait')\n", - "\n", - "plt.xlabel('operator')\n", - "plt.ylabel('log(time + 1)')\n", - "plt.title('Op Schedule')\n", - "plt.legend(loc='upper right')\n", - "plt.show()\n", - "\n", - "print(dataset.get('bottleneck'))\n", - "print(dataset.get('advice'))" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [] - } - ], - "metadata": { - "kernelspec": { - "display_name": "Python 3 (ipykernel)", - "language": "python", - "name": "python3" - }, - "language_info": { - "codemirror_mode": { - "name": "ipython", - "version": 3 - }, - "file_extension": ".py", - "mimetype": "text/x-python", - "name": "python", - "nbconvert_exporter": "python", - "pygments_lexer": "ipython3", - "version": "3.11.1" - } - }, - "nbformat": 4, - "nbformat_minor": 4 -} diff --git a/profiler/advisor/utils/__init__.py b/profiler/advisor/utils/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/profiler/advisor/utils/log.py b/profiler/advisor/utils/log.py new file mode 100644 index 0000000000..1ca111592f --- /dev/null +++ b/profiler/advisor/utils/log.py @@ -0,0 +1,63 @@ +""" +log module +""" +import logging +import os + +from profiler.advisor.common import constant as const + + +def get_log_level(): + log_level = os.getenv(const.ADVISOR_LOG_LEVEL, const.DEFAULT_LOG_LEVEL).upper() + if not hasattr(logging, log_level): + raise AttributeError(f"module 'logging' has no attribute '{log_level}', " + f"supported log level: {', '.join(const.SUPPORTED_LOG_LEVEL)}") + return log_level + + +def init_logger(ctx, param, debug_mode) -> logging.Logger: + logging.logThreads = False + logging.logMultiprocessing = False + logging.logProcesses = False + + class LevelFilter(logging.Filter): + """ + level filter, filer only log with level out + """ + + # pylint:disable=too-few-public-methods + def filter(self, record): + if record.levelno == 60: + return False + return True + + console_log_level = getattr(logging, get_log_level()) + console_handle = logging.StreamHandler() + console_handle.setLevel(console_log_level) + console_handle.addFilter(LevelFilter()) + if debug_mode and not ctx.resilient_parsing: + formatter = logging.Formatter(fmt="[%(asctime)s][%(levelname)s][%(filename)s L%(lineno)s] %(message)s", + datefmt='%Y-%m-%d,%H:%M:%S') + else: + formatter = logging.Formatter(fmt="[%(asctime)s][%(levelname)s] %(message)s", + datefmt='%Y-%m-%d,%H:%M:%S') + console_handle.setFormatter(formatter) + + # add log level out + logging.addLevelName(60, 'OUT') + logger = logging.getLogger() + setattr(logger, 'out', lambda *args: logger.log(60, *args)) + output_handle = logging.StreamHandler() + output_handle.setLevel("OUT") + formatter = 
logging.Formatter("%(message)s") + output_handle.setFormatter(formatter) + + logger.setLevel("DEBUG") + logger.handlers = [] + if not logger.handlers: + logger.addHandler(console_handle) + logger.addHandler(output_handle) + else: + logger.info(logger.handlers) + logger.debug("The logger of analysis have initialized successfully.") + return logger diff --git a/profiler/advisor/utils/tools.py b/profiler/advisor/utils/tools.py new file mode 100644 index 0000000000..1189675e83 --- /dev/null +++ b/profiler/advisor/utils/tools.py @@ -0,0 +1,76 @@ +from functools import partial + +import click + +CONTEXT_SETTINGS = dict(help_option_names=['-H', '-h', '--help']) + + +class ClickAliasedGroup(click.Group): + """ + Alias click command + """ + FORMAT_LIMIT_LEN = 6 + + def __init__(self, *args, **kwargs): + super(ClickAliasedGroup, self).__init__(*args, **kwargs) + self._alias_dict = {} + self._commands = {} + + def command(self, *args, **kwargs): + alias = kwargs.pop('alias', None) + decorator = super(ClickAliasedGroup, self).command(*args, **kwargs) + if not alias: + return decorator + + return partial(self._decorator_warpper, decorator, alias) + + def group(self, *args, **kwargs): + alias = kwargs.pop('alias', None) + decorator = super(ClickAliasedGroup, self).group(*args, **kwargs) + if not alias: + return decorator + + return partial(self._decorator_warpper, decorator, alias) + + def _decorator_warpper(self, decorator, alias, func=None): + cmd = decorator(func) + self._commands[cmd.name] = alias + self._alias_dict[alias] = cmd.name + return cmd + + def resolve_alias(self, cmd_name): + if cmd_name in self._alias_dict.keys(): + return self._alias_dict[cmd_name] + return cmd_name + + def get_command(self, ctx, cmd_name): + cmd_name = self.resolve_alias(cmd_name) + command = super(ClickAliasedGroup, self).get_command(ctx, cmd_name) + return command if command else None + + def format_commands(self, ctx, formatter): + rows = [] + sub_commands = self.list_commands(ctx) + max_len = 0 + if len(sub_commands) > 0: + max_len = max(len(cmd) for cmd in sub_commands) + + limit = formatter.width - self.FORMAT_LIMIT_LEN - max_len + for sub_command in sub_commands: + cmd = self.get_command(ctx, sub_command) + if cmd is None: + continue + if hasattr(cmd, 'hidden') and cmd.hidden: + continue + if sub_command in self._commands: + alias = self._commands[sub_command] + sub_command = f'{sub_command}, {alias}' + if click.__version__[0] < '7': + cmd_help = cmd.short_help or '' + else: + cmd_help = cmd.get_short_help_str(limit) + rows.append((sub_command, cmd_help)) + + if rows: + with formatter.section('Commands'): + formatter.write_dl(rows) diff --git a/profiler/advisor/utils/utils.py b/profiler/advisor/utils/utils.py new file mode 100644 index 0000000000..d7837e1e40 --- /dev/null +++ b/profiler/advisor/utils/utils.py @@ -0,0 +1,499 @@ +import json +import logging +import multiprocessing as mp +import os +import queue +import re +import stat +import time +import traceback +import types +from functools import wraps +from typing import Any, Set + +import click +import requests +from requests.adapters import HTTPAdapter +from tqdm import tqdm + +from profiler.advisor.common import constant as const +from profiler.advisor.common.timeline.fusion_ops_db import FusionOperatorDB +from profiler.advisor.common.version_control import VersionControl +from profiler.advisor.utils.log import init_logger, get_log_level + +logger = logging.getLogger() +logger.setLevel(get_log_level()) +permission_warned: Set = set() + + +def 
ignore_warning(exception: Exception = None): + return exception + + +class ContextObject(object): + def __init__(self): + self._debug = False + + def set_debug(self, debug=False): + self._debug = debug + + @property + def debug_mode(self): + return self._debug + + +def debug_option(f): + return click.option('--debug', '-D', + is_flag=True, + expose_value=False, + is_eager=True, + callback=init_logger, + help="Debug Mode. Shows full stack trace when error occurs.")(f) + + +def singleton(cls): + """ + :param cls: any class + :return: singleton handle + """ + _instance = {} + + def _singleton(*args: any, **kw: any) -> any: + if cls not in _instance: + _instance[cls] = cls(*args, **kw) + return _instance.get(cls) + + return _singleton + + +def lazy_property(func): + """ + Lazy loading of class attributes. + which is calculated only once when it is called for the first time, + and will not be repeated for each call after that. + """ + attr_name = "_lazy_" + func.__name__ + + @property + def _lazy_property(instance): + if not hasattr(instance, attr_name): + setattr(instance, attr_name, func(instance)) + return getattr(instance, attr_name) + + return _lazy_property + + +class CheckPathAccess: + """ + check path access permissions + """ + + # pylint: disable=no-member + def __init__(self, func): + wraps(func)(self) + self.warned = permission_warned + + def __call__(self, *args, **kwargs): + path = args[0] + if not os.access(path, os.R_OK) and path not in self.warned: + logger.warning("%s can not read, check the permissions", path) + self.warned.add(path) + return self.__wrapped__(*args, **kwargs) + + def __get__(self, instance, cls): + if instance is None: + return self + return types.MethodType(self, instance) + + +def walk_error_handler(error): + """ + handle dir walk error + """ + if error.filename not in permission_warned: + logger.warning(error) + permission_warned.add(error.filename) + + +@CheckPathAccess +def get_file_path_from_directory(path: str, check_func: Any) -> list: + """ + get file from directory + """ + file_list = [] + for root, _, files in os.walk(path, onerror=walk_error_handler): + for filename in files: + filepath = os.path.join(root, filename) + if check_func(filename): + file_list.append(filepath) + return file_list + + +@singleton +class Timer: + def __init__(self): + self.strftime = time.strftime("%Y%m%d%H%M%S", time.localtime(time.time())) + + +def get_analyze_processes(): + # n_processes not exposed to user through ma-advisor command arguments now + return min(int(os.getenv(const.MA_ADVISOR_ANALYZE_PROCESSES, 1)), const.MA_ADVISOR_MAX_PROCESSES) + + +def init_timeline_ops_db(cann_version=None, torch_version=None): + logger.debug("init operators database") + + return FusionOperatorDB(cann_version=cann_version, torch_version=torch_version) + + +def format_timeline_result(result: dict, dump_html=False): + """ + :Param result: json for api name and stack + :Return: json after format + """ + format_result = {} + if dump_html: + result = json.loads(json.dumps(result).replace("\\r\\n", "
").replace("", "<module>")) + + for key, stacks in result.items(): + api_name = key.split(":")[0] + format_result[api_name] = sorted(list(stacks.items()), key=lambda stack: stack[1], reverse=True) + return format_result + + +class ParallelJob: + + def __init__(self, src_func, ops_api_list, job_name=None): + if not callable(src_func): + raise TypeError(f"src_func should be callable") + + if not isinstance(ops_api_list, (list, tuple)): + raise TypeError(f"ops_api_list should be list or tuple") + + self.src_func = src_func + self.ops_api_list = ops_api_list + self.job_name = job_name + + def start(self, n_proccesses): + + queue = mp.Queue(len(self.ops_api_list)) + completed_queue = mp.Queue() + for i in range(len(self.ops_api_list)): + queue.put(i) + + processes = [] + listen = mp.Process(target=self.listener, args=(completed_queue, len(self.ops_api_list),)) + listen.start() + + for i in range(n_proccesses): + p = mp.Process(target=self.parallel_queue, args=(queue, completed_queue,)) + processes.append(p) + p.start() + + for p in processes: + p.join() + + completed_queue.put(None) + listen.join() + + def listener(self, completed_queue, num): + pbar = tqdm(total=num, position=0, leave=False, ncols=100, desc=self.job_name) + for _ in iter(completed_queue.get, None): + pbar.update() + pbar.refresh() + pbar.n = num + + def parallel_queue(self, job_queue, completed_queue): + while True: + try: + if job_queue.empty(): + break + token = job_queue.get(timeout=1) + except queue.Empty: + continue + self.src_func(*self.ops_api_list[token]) + completed_queue.put(token) + + +def mp_queue_to_list(job_queue): + queue_list = [] + while True: + try: + if job_queue.empty(): + break + token = job_queue.get(timeout=1) + queue_list.append(token) + except queue.Empty: + continue + return queue_list + + +def load_parameter(parameter, default): + if not os.environ.get(parameter, None): + return default + else: + return os.environ.get(parameter) + + +def get_supported_subclass(clazz: VersionControl.__class__, cann_version: str): + """ + Returns a list of subclasses that support the specified version + :param clazz: Class name which is extends to VersionControl.__class__ + :param cann_version: The CANN software version + :return: The list of subclasses that support the specified CANN version + """ + # 获取所有支持这个cann版本的子类 + dataset_classes = clazz.__subclasses__() + sub_class_list = [cls for cls in dataset_classes if cls.is_supported(cann_version)] + logger.debug("The support subclass list is %s, cann version is %s", str(sub_class_list), cann_version) + return sub_class_list + + +def to_percent(num: float) -> str: + """ + change float to percent format + """ + num = num * 100 + return f"{num:.2f}%" + + +def safe_division(numerator, denominator): + """Return 0 if denominator is 0.""" + return denominator and numerator / denominator + + +def safe_write(content, save_path): + if os.path.dirname(save_path) != "": + os.makedirs(os.path.dirname(save_path), exist_ok=True) + + with os.fdopen(os.open(save_path, os.O_WRONLY | os.O_CREAT | os.O_TRUNC, + stat.S_IRUSR | stat.S_IWUSR | stat.S_IRGRP), "w") as f: + f.write(content) + + +def create_directory_for_file(file: str) -> None: + """ + create directory for file + """ + dirname = os.path.dirname(file) + if not os.path.exists(dirname): + os.makedirs(dirname) + + +class CheckPathAccess: + """ + check path access permissions + """ + + # pylint: disable=no-member + def __init__(self, func): + wraps(func)(self) + self.warned = permission_warned + + def __call__(self, *args, **kwargs): 
+ path = args[0] + if path and not os.access(path, os.R_OK) and path not in self.warned: + logger.warning("%s can not read, check the permissions", path) + self.warned.add(path) + return self.__wrapped__(*args, **kwargs) + + def __get__(self, instance, cls): + if instance is None: + return self + return types.MethodType(self, instance) + + +@CheckPathAccess +def get_file_path_from_directory(path, check_func): + """ + get file from directory + """ + file_list = [] + + if not path: + return file_list + + if not os.path.isdir(path): + logger.warning("Expected existed directory, but got %s", path) + + for root, _, files in os.walk(path): + for filename in files: + filepath = os.path.join(root, filename) + if check_func(filename): + file_list.append(filepath) + return file_list + + +@CheckPathAccess +def get_dir_path_from_directory(path: str, check_func: Any) -> list: + """ + get file from directory + """ + file_list = [] + for root, _, files in os.walk(path, onerror=walk_error_handler): + for filename in files: + filepath = os.path.join(root, filename) + if check_func(filename): + file_list.append(filepath) + return file_list + + +def is_regex_pattern(string: str): + """ + Check if str is a regular expression. + """ + escaped_string = re.escape(string) + return not (escaped_string == string) + + +def join_prof_path(root_dir: str, sub_dir: str) -> str: + """ + regular expression matching method for path concatenation + """ + if is_regex_pattern(sub_dir): + for root, _, _ in os.walk(root_dir, onerror=walk_error_handler): + if re.match(sub_dir, os.path.basename(root)): + return root + else: + sub_dir = os.path.join(root_dir, sub_dir) + if os.path.exists(sub_dir): + return sub_dir + return "" + + +def format_excel_title(title: str) -> str: + """ + format excel title + """ + title = title.lower() + title = title.replace("(us)", '') + title = title.replace("(ns)", '') + title = title.replace("(%)", '') + title = title.replace(" ", "_") + return title + + +def format_float(num: float) -> float: + """ + format float num, round to 2 decimal places + """ + return round(num, 2) + + +class SafeOpen: + """ + safe open to check file + """ + + # pylint: disable=consider-using-with + def __init__(self, name, mode='r', encoding=None): + self.file = None + if not os.path.exists(name): + logger.warning("%s not exist, please check", name) + return + + if os.access(name, os.R_OK): + self.file = open(name, mode, encoding=encoding, errors="ignore") + else: + logger.warning("%s can not read, check the permissions", name) + + def __enter__(self): + return self.file + + def __exit__(self, exc_type, exc_val, exc_tb): + if self.file: + self.file.close() + return True + + +def save_downloaded_file(response, url_path, file_save_path): + """保存响应体中的文件 + + 参数: + response: 请求后获取的响应体 + url_path: url路径 + file_save_path: 保存路径 + 返回: + final_file_path: 文件保存绝对路径 + """ + # 获取url路径中的文件名, 拼接在保存路径下 + file_save_path = os.path.normpath(file_save_path) + file_name = os.path.basename(url_path) + final_file_path = os.path.join(file_save_path, file_name) + # 若目标保存路径不存在,则自动生成 + if not os.path.exists(file_save_path): + os.makedirs(file_save_path) + if response.status_code <= 300: + logger.debug("Response status code is %s", response.status_code) + flags = os.O_WRONLY | os.O_CREAT | os.O_EXCL + modes = stat.S_IWUSR | stat.S_IRUSR + # 若文件已存在,则移除已有的文件并保存最新的文件 + if os.path.exists(final_file_path): + os.remove(final_file_path) + # 保存文件 + with os.fdopen(os.open(final_file_path, flags, modes), mode="wb") as f: + f.write(response.content) + 
logger.info("Success to save content in: %s", os.path.abspath(final_file_path)) + else: + # 若响应码不为预期的数值, 显示相应告警 + logger.warning("Failed to save the response body. The response status code is %s. " + "Please check the network or file URL", response.status_code) + + +def request_with_retry(url_path): + """使用requests请求获取文件, 失败则进行重试, 最多请求 max_retries+1 次 + + 参数: + url_path: URL路径 + file_save_path: 云文件保存路径 + """ + logger.debug("Requesting or retrying to get %s", url_path) + + # 若从环境变量指定了保存路径,优先从环境变量中获取,若为空则使用默认的云文件保存路径constant.CLOUD_RULE_PATH + file_save_path = os.path.join(os.path.expanduser("~"), const.CLOUD_RULE_PATH) + if os.getenv(const.ADVISOR_RULE_PATH): + file_save_path = os.getenv(const.ADVISOR_RULE_PATH) + + session = requests.Session() + # 使用session发起的所有请求, 默认最多会重试 max_retries 次, 计入最初请求, 最差情况下请求 max_retries+1 次 + adapter = HTTPAdapter(max_retries=const.MAX_RETRIES) + session.mount('http://', adapter) + session.mount('https://', adapter) + + logger.debug('Session try to get response') + response = None + try: + response = session.get(url_path, timeout=const.TIMEOUT) + except Exception as e: + logger.debug("Error: %s: %s", e, traceback.format_exc()) + + if response is None: + logger.warning("Fail to download: %s, response is None, " + "please use the environment variable %s for more detailed information", + url_path, const.ADVISOR_LOG_LEVEL) + else: + try: + # 若响应码为400~600之间,response.raise_for_status抛出HTTPError错误, 跳过调用save_downloaded_file函数逻辑 + response.raise_for_status() + save_downloaded_file(response, url_path=url_path, file_save_path=file_save_path) + except Exception as e: + logger.warning("Error: %s: %s", e, traceback.format_exc()) + # 关闭 session, 清除所有装配器 + session.close() + + +def read_csv(file): + import csv + + raw_data = [] + logger.debug("Parse file %s", file) + with SafeOpen(file, encoding="utf-8") as csv_file: + try: + csv_content = csv.reader(csv_file) + for row in csv_content: + raw_data.append(row) + except OSError as error: + logger.error("Read csv file failed : %s", error) + return [] + + return raw_data diff --git a/profiler/advisor/version.py b/profiler/advisor/version.py new file mode 100644 index 0000000000..caf2acb552 --- /dev/null +++ b/profiler/advisor/version.py @@ -0,0 +1,38 @@ +import sys + + +def get_package_version(package_name) -> str: + """ + Get package version info by importlib + Args: + package_name: package name + + Returns: + version: version info string + """ + if sys.version_info >= (3, 8): + # Because importlib_metadata has been changed to importlib.metadata in py3.8 + from importlib import metadata + from importlib.metadata import PackageNotFoundError + else: + import importlib_metadata as metadata + from importlib_metadata import PackageNotFoundError + + try: + version = metadata.version(package_name) + except PackageNotFoundError: + version = "UNKNOWN" + return version + + +def print_version_callback(ctx, param, value): # NOQA + import click + + if not value or ctx.resilient_parsing: + return + click.echo('Version {}'.format(get_package_version("att_advisor"))) + ctx.exit() + + +def cli_version(): + return get_package_version("att_advisor") -- Gitee From 9311bc327a1107bc6ac1dc677c37966d1491e515 Mon Sep 17 00:00:00 2001 From: wuyuhan Date: Tue, 9 Apr 2024 19:28:19 +0800 Subject: [PATCH 02/21] =?UTF-8?q?=E5=A2=9E=E5=8A=A0timeline=E8=9E=8D?= =?UTF-8?q?=E5=90=88=E7=AE=97=E5=AD=90API=E7=9A=84analyzer=E5=92=8Cdataset?= =?UTF-8?q?=E5=AE=9E=E7=8E=B0,=20=E5=A2=9E=E5=8A=A0overall=E5=88=86?= =?UTF-8?q?=E6=9E=90=E7=9A=84=E5=AE=9E=E7=8E=B0?= MIME-Version: 
1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .../analyzer/overall/overall_analyzer.py | 44 +++ .../fusion_ops/fusion_ops_analyzer.py | 277 ++++++++++++++++++ .../advisor/dataset/timeline_event_dataset.py | 191 ++++++++++++ 3 files changed, 512 insertions(+) create mode 100644 profiler/advisor/analyzer/overall/overall_analyzer.py create mode 100644 profiler/advisor/analyzer/scheduling/fusion_ops/fusion_ops_analyzer.py create mode 100644 profiler/advisor/dataset/timeline_event_dataset.py diff --git a/profiler/advisor/analyzer/overall/overall_analyzer.py b/profiler/advisor/analyzer/overall/overall_analyzer.py new file mode 100644 index 0000000000..93b227fb61 --- /dev/null +++ b/profiler/advisor/analyzer/overall/overall_analyzer.py @@ -0,0 +1,44 @@ +import logging + +from profiler.advisor.analyzer.base_analyzer import BaseAnalyzer +from profiler.advisor.display.html.render import HTMLRender +from profiler.advisor.result.result import OptimizeResult +from profiler.compare_tools.compare_backend.utils.constant import Constant +from profiler.compare_tools.compare_interface.comparison_interface import ComparisonInterface + +logger = logging.getLogger() + + +class OverallSummaryAnalyzer(BaseAnalyzer): + + def __init__(self, profiling_path, benchmark_profiling_path=None, **kwargs): + self.benchmark_profiling_path = benchmark_profiling_path or profiling_path + self.profiling_path = profiling_path + self.html_render = HTMLRender() + self.result = OptimizeResult() + + def optimize(self): + compare_result = ComparisonInterface(self.benchmark_profiling_path, self.profiling_path).compare( + Constant.OVERALL_COMPARE) + + headers = compare_result.get('Model Profiling Time Distribution').get("headers", []) + rows = compare_result.get('Model Profiling Time Distribution').get("rows", []) + + self.make_record() + self.make_render(headers=headers, rows=rows) + return compare_result + + def make_record(self): + pass + + def make_render(self, **kwargs): + headers = kwargs.get("headers") + rows = kwargs.get("rows") + + if not headers or not rows: + logger.info("Empty headers or rows, skip render overall analysis html") + self.html_render.render_template(key="overall", + template_dir="templates", + template_name="overall_analysis.html", + headers=kwargs.get("headers"), + rows=kwargs.get("rows")) diff --git a/profiler/advisor/analyzer/scheduling/fusion_ops/fusion_ops_analyzer.py b/profiler/advisor/analyzer/scheduling/fusion_ops/fusion_ops_analyzer.py new file mode 100644 index 0000000000..3db3ae55a1 --- /dev/null +++ b/profiler/advisor/analyzer/scheduling/fusion_ops/fusion_ops_analyzer.py @@ -0,0 +1,277 @@ +import multiprocessing +import logging +import re + +from tqdm import tqdm + +from profiler.advisor.analyzer.base_analyzer import BaseAnalyzer +from profiler.advisor.common import constant as const +from profiler.advisor.common.timeline.event import TimelineEvent +from profiler.advisor.dataset.timeline_event_dataset import TimelineEventDataset +from profiler.advisor.result.result import OptimizeResult +from profiler.advisor.result.item import OptimizeItem, OptimizeRecord +from profiler.advisor.utils.utils import format_timeline_result +from profiler.advisor.display.html.render import HTMLRender +from profiler.advisor.utils.utils import init_timeline_ops_db + +logger = logging.getLogger() + + +class TimelineFusionOpsAnalyzer(BaseAnalyzer): + + def __init__(self, n_processes: int = 1, cann_version=None, torch_version=None, **kwargs): + self.n_processes = n_processes + 
self._matched_op_index = {} if self.n_processes <= 1 else multiprocessing.Manager().dict() + self.matched_op_stacks = {} + self.cann_version = cann_version + self.torch_version = torch_version + self.empty_stacks = True + self.event_dataset = None + self.html_render = HTMLRender() + self.result = OptimizeResult() + + def optimize(self, timeline_dataset, **kwargs): + + self.event_dataset = timeline_dataset + + for mode in [const.ATEN.lower(), const.OPTIMIZER.lower()]: + + for op_combined, npu_apis in tqdm(getattr(init_timeline_ops_db(self.cann_version, self.torch_version), + f"_{mode}_op_api_map").items(), leave=False, ncols=100, + desc="Scanning timeline for affinity apis"): + for npu_api in npu_apis.split("/"): + self.find_fusion_ops(self.event_dataset, op_combined, npu_api, mode) + + self.query_stack(self.event_dataset) + + logger.info("Finish timeline analysis") + self.make_record() + self.make_render() + return self.result + + def find_fusion_ops(self, event_dataset: TimelineEventDataset, ops: str, npu_api: str, mode: str): + """ + :Param event_dataset: dataset of timeline event + :Param ops: operator combination with '-' as separator , e.g. permute-reshape + :Param npu_api: api of torch_npu, generally more efficient than torch api + :Param mode: aten or dequeue or optimizer + :Return: json of op_name and called times and detail stacks + """ + op_rule_pattern, enable_regex = self._format_rule_to_pattern(ops) + if not enable_regex: + self._match_ops(event_dataset, op_rule_pattern, npu_api, mode) + else: + try: + self._match_ops_with_regex(event_dataset, op_rule_pattern, npu_api, mode) + except Exception as e: + logger.warning("Failed to find fusion operators with regex %s, reason is %s", ops, e) + + def _match_ops(self, event_dataset: TimelineEventDataset, ops: str, npu_api: str, mode: str): + """ match operator based on fusion operators rule(without regex), + only strictly equals of op name list means matched + :Param event_dataset: dataset of timeline event + :Param ops: operator combination with '-' as separator , e.g. permute-reshape + :Param npu_api: api of torch_npu, generally more efficient than torch api + :Param mode: aten or dequeue or optimizer + """ + op_list = ops.split(const.OP_SEP) + + matched_op_index = set() + api_ops_matched = False + + for index, event in enumerate(getattr(event_dataset, mode)): + if self._replace_op_name_prefix(event.name, mode) != op_list[0]: + continue + tmp_dequeue_event_names = [self._replace_op_name_prefix(event.name, mode) for event in + getattr(event_dataset, mode)[index: index + len(op_list)]] + if tmp_dequeue_event_names != op_list: + continue + api_ops_matched = True + matched_op_index.add(event.dataset_index) + + if api_ops_matched: + self._matched_op_index[npu_api + f":{ops}"] = matched_op_index + + def _match_ops_with_regex(self, event_dataset: TimelineEventDataset, op_rule_pattern: str, npu_api: str, + mode: str): + """ match operator based on fusion operators rule(with regex), + using regex to support condition like 'a = torch.mul(xxx) if xxx else torch.add(xxx)' + :Param event_dataset: dataset of timeline event + :Param op_rule_pattern: fusion operators rule with regex definition , e.g. 
add-mul{0,10}, add-mul* + :Param npu_api: api of torch_npu, generally more efficient than torch api + :Param mode: aten or dequeue or optimizer + """ + matched_op_index = set() + total_op_name = "".join([f"{const.OP_SEP}{self._replace_op_name_prefix(event.name, mode)}{const.OP_SEP}" + for event in + getattr(event_dataset, mode)]) + + matched_pattern_index_tuple = [(x.start(0), x.end(0)) for x in re.finditer(op_rule_pattern, total_op_name)] + # convert list of index tuple to a whole list: [(3, 25), ...] -> [3, 25, ...] + total_ops_split_points = [num for sublist in matched_pattern_index_tuple for num in sublist] + + api_ops_matched = len(total_ops_split_points) != 0 + + op_index = [] + if 0 not in total_ops_split_points: + total_ops_split_points = [0] + total_ops_split_points + if len(list(total_op_name)) not in total_ops_split_points: + total_ops_split_points.append(len(list(total_op_name))) + + # convert total ops name like "-add-mul-xxx-div-" to small pieces like [["add", "mul"], [...], ["div"]] + # by the regex index and then calculate the real index for matched fusion operators in event dataset + for l, r in zip(total_ops_split_points, total_ops_split_points[1:]): + matched_op_flag = True if (l, r) in matched_pattern_index_tuple else False + matched_ops_list = total_op_name[l: r].strip(const.OP_SEP).split(const.OP_SEP + const.OP_SEP) + op_index.append([matched_op_flag, len(matched_ops_list)]) + for i, _ in enumerate(op_index): + if i > 0: + # calculate cumsum for indexing matched operator + op_index[i][1] = op_index[i][1] + op_index[i - 1][1] + op_index = [[False, 0]] + op_index + + for i, _ in enumerate(op_index): + if not op_index[i][0]: + continue + index = op_index[i - 1][1] + matched_op_index.add(index) + + if index > len(getattr(event_dataset, mode)) - 1: + continue + dataset_index = getattr(event_dataset, mode)[index].get("dataset_index") + matched_op_index.add(dataset_index) + + if api_ops_matched: + self._matched_op_index[npu_api + f":{op_rule_pattern}"] = sorted(list(matched_op_index)) + + def make_record(self): + """ + make record for what and how to optimize + """ + if not self.matched_op_stacks: + return + + desc = f"Found {len(format_timeline_result(self.matched_op_stacks))} apis to be replaced" \ + f" based on the runtime env cann-{self.cann_version} and torch-{self.torch_version}" + suggestion = "Please replace training api according to sub table 'Affinity training api'" + if self.empty_stacks: + desc += ", but with no stack" + suggestion = const.TIMELINE_EMPTY_STACKS_PROMPT.format( + timeline_profiling_doc_url=const.TIMELINE_WITH_STACK_DOC_URL + ) + + optimization_item = OptimizeItem( + const.AFFINITY_TRAINING_API, + desc, + [suggestion] + ) + + self.result.add(OptimizeRecord(optimization_item)) + + record_title = ["Affinity API", "Code stacks", "Stack called counts"] + self.result.add_detail(const.AFFINITY_TRAINING_API, headers=record_title) + + for api_name, stacks_info in format_timeline_result(self.matched_op_stacks).items(): + if not stacks_info: + detail = [api_name, "null", "null"] + self.result.add_detail(const.AFFINITY_TRAINING_API, detail=detail) + else: + for stack in stacks_info: + detail = [api_name, *stack] + self.result.add_detail(const.AFFINITY_TRAINING_API, detail=detail) + + def make_render(self): + format_result_for_html = format_timeline_result(dict(self.matched_op_stacks), dump_html=True) + + self.html_render.render_template(key="scheduling", + template_dir="templates", + template_name="affinity_api.html", + cann_version=self.cann_version, + 
torch_version=self.torch_version, + empty_stacks=self.empty_stacks, + with_stack_doc_url=const.TIMELINE_WITH_STACK_DOC_URL, + api_doc_url=const.TIMELINE_API_DOC_URL, + result=format_result_for_html) + + def query_stack(self, event_dataset: TimelineEventDataset): + if all([len(matched_index) == 0 for matched_index in self._matched_op_index.values()]): + return + + op_stack_list = event_dataset.parse_data_with_generator(self._query_stack_by_matched_index) + for op_stack in op_stack_list: + for op_rule, stack in op_stack.items(): + if op_rule not in self.matched_op_stacks: + self.matched_op_stacks[op_rule] = {} + if stack == const.TIMELINE_FUSION_OPS_NO_STACK_FLAG: + continue + if stack not in self.matched_op_stacks[op_rule]: + self.matched_op_stacks[op_rule][stack] = 0 + self.matched_op_stacks[op_rule][stack] += 1 + + def _query_stack_by_matched_index(self, index, event): + stack_record = {} + event = TimelineEvent(event) + + matched_op_rules = [] + for op_rule, matched_index in self._matched_op_index.items(): + if index not in matched_index: + continue + + matched_op_rules.append(op_rule) + stack = event.args.get(const.CALL_STACKS) + + if not stack: + logger.debug("Got empty '%s' for event %s", const.CALL_STACKS, event) + continue + + if self.empty_stacks and stack: + self.empty_stacks = False + + stack_record[op_rule] = stack + + if matched_op_rules and not stack_record: + for op_rule in matched_op_rules: + stack_record[op_rule] = const.TIMELINE_FUSION_OPS_NO_STACK_FLAG + + return stack_record + + def _replace_op_name_prefix(self, event_name, mode): + if mode == const.DEQUEUE.lower(): + op_name_prefix = f"{const.DEQUEUE}{const.DEQUEUE_SEP}" + elif mode == const.ATEN: + op_name_prefix = f"{const.ATEN}{const.ATEN_SEP}" + else: + op_name_prefix = f"{const.OPTIMIZER}.{const.OPTIMIZER_STEP}{const.OPTIMIZER_SEP}" + + return event_name.replace(op_name_prefix, "") + + def _format_rule_to_pattern(self, op_rule): + """ + Args: + op_rule: like (mul){0,1}-(add|neg){0,2}-dropout-(softmax)* + + Returns: op_pattern like (-mul-){0,1}(-add-|-neg-){0,2}(-dropout-)(-softmax-)* + """ + enable_regex = False + if "(" not in op_rule and ")" not in op_rule: + # op_rule which requires fuzzy matching mush consist of "()" + return op_rule, enable_regex + + enable_regex = True + op_pattern_list = op_rule.split(const.OP_SEP) + format_op_pattern = "" + for op_pattern in op_pattern_list: + matched_res = re.search(r'\((.*?)\)', op_pattern) + + ops_index_range = (matched_res.start() + 1, matched_res.end() - 1) if matched_res else ( + 0, len(op_pattern)) + + op_names = op_pattern[ops_index_range[0]: ops_index_range[1]] + tmp_op_names_record = [] + for op_name in op_names.split("|"): + tmp_op_names_record.append(f"{const.OP_SEP}{op_name.strip(' ')}{const.OP_SEP}") + op_suffix = op_pattern[ops_index_range[1] + 1:] + op_names_format = f"({'|'.join(tmp_op_names_record)}){op_suffix}" + + format_op_pattern += op_names_format + return format_op_pattern, enable_regex diff --git a/profiler/advisor/dataset/timeline_event_dataset.py b/profiler/advisor/dataset/timeline_event_dataset.py new file mode 100644 index 0000000000..c1134a9784 --- /dev/null +++ b/profiler/advisor/dataset/timeline_event_dataset.py @@ -0,0 +1,191 @@ +import logging +from typing import List + +import ijson +from tqdm import tqdm + +from profiler.advisor.common import constant as const +from profiler.advisor.common.timeline.event import TimelineEvent +from profiler.advisor.utils.utils import get_file_path_from_directory +from profiler.advisor.utils.utils import 
singleton + +logger = logging.getLogger() + + +@singleton +class TimelineEventDataset: + + def __init__(self, root_dir, **kwargs) -> None: + self._ops_with_task_type = {} + self._ops_with_stack = {} + self._torch_to_npu = {} + self._acl_to_npu = set() + self._aten: List[str] = [] + self._optimizer: List[str] = [] + self.timeline_dir = root_dir + self.timeline_data_list = get_file_path_from_directory(root_dir, lambda file: file.endswith("trace_view.json")) + self.dataset_len = None + self.analysis_mode = kwargs.get("analysis_mode") + self.task_type = kwargs.get("task_type") + self.cann_version = kwargs.get("cann_version") + self.torch_version = kwargs.get("torch_version") + + if self.analysis_mode in ["fusion_ops", "all"]: + logger.info("Load fusion operators database for cann version '%s' and torch version '%s'", + self.cann_version, self.torch_version) + + self.parse() + + if self.analysis_mode in ["op_stack", "all"]: + self._task_op_names = list(set([event_key.split("-")[0] for event_key in self._ops_with_task_type.keys()])) + + self._post_process() + + @property + def ops_with_stack(self): + return self._ops_with_stack + + @property + def torch_to_npu(self): + return self._torch_to_npu + + @property + def acl_to_npu(self): + return self._acl_to_npu + + @property + def ops_with_task_type(self): + return self._ops_with_task_type + + @property + def task_op_names(self): + return self._task_op_names + + @property + def optimizer(self): + return self._optimizer + + @property + def aten(self): + return self._aten + + @classmethod + def get_key(cls): + """ + get key of dataset + :return: key + """ + return cls.__module__.rsplit('.', maxsplit=1)[-1] + + def parse(self): + + if len(self.timeline_data_list) == 0: + logger.warning("Please ensure trace_view.json in %s, skip timeline analysis.", self.timeline_dir) + return False + + if len(self.timeline_data_list) > 1: + logger.warning("Please ensure only one trace_view.json in %s, skip timeline analysis.", self.timeline_dir) + return False + + result = self.parse_data_with_generator(self._add_event) + + if not self.dataset_len: + self.dataset_len = len(result) + + return True + + def parse_data_with_generator(self, func): + result = [] + try: + with open(self.timeline_data_list[0], "r") as f: + for i, event in tqdm(enumerate(ijson.items(f, "item")), + leave=False, ncols=100, desc="Building dataset for timeline analysis", + total=self.dataset_len): + func_res = func(index=i, event=event) + if func_res is not None: + result.append(func_res) + except Exception as e: + logger.warning("Error %s while parsing file %s, continue to timeline analysis", e, + self.timeline_data_list[0]) + return result + + def _add_ops_with_task_type(self, event): + key = f"{event.name}-{event.ts}" + self._ops_with_task_type[key] = TimelineEvent( + { + const.TASK_TYPE: event.args.get(const.TASK_TYPE), + "task_id": event.args.get("Task Id"), + "tid": event.tid, + "name": event.name, + "ts": str(event.ts) + } + ) + + def _add_ops_with_stack(self, event): + self._ops_with_stack[str(event.ts)] = TimelineEvent({"name": event.name, "dataset_index": event.dataset_index}) + + def _add_torch_to_npu(self, event): + key = f"{event.ph}-{event.id}" + self._torch_to_npu[key] = TimelineEvent({"tid": event.tid, "ts": str(event.ts)}) + + def _add_acl_to_npu(self, event): + # op with task type equals to ai_cpu which derived from acl_to_npu do not have stacks + self._acl_to_npu.add(str(event.ts)) + + def _add_optimizer(self, event: TimelineEvent): + self._optimizer.append(TimelineEvent({"name": 
event.name, "dataset_index": event.dataset_index})) + + def _add_aten(self, event: TimelineEvent): + self._aten.append(TimelineEvent({ + "name": event.name, "dataset_index": event.dataset_index, "ts": event.ts, "dur": event.dur + })) + + def _add_event(self, index, event): + event["dataset_index"] = index + if not isinstance(event, TimelineEvent): + event = TimelineEvent(event) + + if self.analysis_mode == "fusion_ops": + self._add_event_for_fusion_ops(event) + elif self.analysis_mode == "op_stack": + self._add_event_for_op_stack(event) + else: + self._add_event_for_fusion_ops(event) + self._add_event_for_op_stack(event) + return True + + def _add_event_for_fusion_ops(self, event): + if event.name.lower().startswith(f"{const.ATEN}{const.ATEN_SEP}") or event.name.lower().startswith( + f"{const.NPU}{const.ATEN_SEP}"): + self._add_aten(event) + return + + if event.name.startswith(f"{const.OPTIMIZER}.{const.OPTIMIZER_STEP}{const.OPTIMIZER_SEP}"): + self._add_optimizer(event) + return + + def _add_event_for_op_stack(self, event): + if event.name.lower() == const.TORCH_TO_NPU: + self._add_torch_to_npu(event) + return + + if event.args.get(const.CALL_STACKS): + self._add_ops_with_stack(event) + return + + if event.args.get(const.TASK_TYPE) and event.args.get(const.TASK_TYPE) in [const.AI_CORE, const.AI_CPU]: + self._add_ops_with_task_type(event) + return + + if event.name and event.ts and event.name == const.ACL_TO_NPU: + self._add_acl_to_npu(event) + return + + def _post_process(self): + # eliminate sub aten operator of the first level aten operator by 'ts' and 'dur', + # keep the first level aten operator contiguous + formated_atens = [] + for aten_event in sorted(self._aten, key=lambda x: x.get("ts", -1)): + if not formated_atens or not formated_atens[-1].ts_include(aten_event): + formated_atens.append(aten_event) + self._aten = formated_atens -- Gitee From b9dc511efb93e12d65b97d3ccfb647c5553c4c41 Mon Sep 17 00:00:00 2001 From: wuyuhan Date: Tue, 9 Apr 2024 19:32:41 +0800 Subject: [PATCH 03/21] =?UTF-8?q?=E5=A2=9E=E5=8A=A0cli=E4=BD=9C=E4=B8=BAad?= =?UTF-8?q?visor,=20compare=20tools,=20cluster=20analyse=E7=9A=84=E5=85=A5?= =?UTF-8?q?=E5=8F=A3?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- profiler/cli/__init__.py | 4 + profiler/cli/analyze_cli.py | 145 +++++++++++++++++++++++++++++++++++ profiler/cli/cluster_cli.py | 0 profiler/cli/compare_cli.py | 38 +++++++++ profiler/cli/complete_cli.py | 29 +++++++ profiler/cli/entrance.py | 59 ++++++++++++++ profiler/cli/query_cli.py | 0 7 files changed, 275 insertions(+) create mode 100644 profiler/cli/__init__.py create mode 100644 profiler/cli/analyze_cli.py create mode 100644 profiler/cli/cluster_cli.py create mode 100644 profiler/cli/compare_cli.py create mode 100644 profiler/cli/complete_cli.py create mode 100644 profiler/cli/entrance.py create mode 100644 profiler/cli/query_cli.py diff --git a/profiler/cli/__init__.py b/profiler/cli/__init__.py new file mode 100644 index 0000000000..4666e2d531 --- /dev/null +++ b/profiler/cli/__init__.py @@ -0,0 +1,4 @@ +from profiler.advisor.config.config import Config +from profiler.advisor.utils.utils import Timer + +Config().set_log_path(f"ma_advisor_{Timer().strftime}.xlsx") diff --git a/profiler/cli/analyze_cli.py b/profiler/cli/analyze_cli.py new file mode 100644 index 0000000000..34215d12d1 --- /dev/null +++ b/profiler/cli/analyze_cli.py @@ -0,0 +1,145 @@ +import click +import sys +import os +import logging + 
+sys.path.append(os.path.join(os.path.dirname(os.path.dirname(__file__)), "compare_tools")) +sys.path.append(os.path.join(os.path.dirname(os.path.dirname(__file__)), "cluster_analyse")) + +from profiler.advisor.utils.tools import CONTEXT_SETTINGS, ClickAliasedGroup +from profiler.advisor.common import constant +from profiler.advisor.utils.utils import debug_option +from profiler.advisor.common.module_lib import AnalysisScope +from profiler.advisor.interface.interface import Interface + +logger = logging.getLogger() + + +def _analyze(dimensions, **kwargs): + is_inference = kwargs.get("is_inference", False) + user_input_mode = kwargs.get("mode") + result_list = [] + job_list = [] + + for dimension in dimensions: + + valid_modes = AnalysisScope.analyzer_list(dimension, is_inference) + if not valid_modes: + logger.info("Skip analysis of dimension %s, no analyzer", dimension) + continue + if user_input_mode and user_input_mode not in valid_modes: + logger.error("Got error mode %s for analysis dimension %s, optionals are %s", user_input_mode, dimension, + valid_modes) + continue + + analysis_modes = [user_input_mode] if user_input_mode else valid_modes + + for mode in analysis_modes: + interface = Interface(**kwargs) + job_list.append((dimension, mode, interface)) + + for i, (dimension, mode, interface) in enumerate(job_list[::-1]): + result_list.append( + interface.get_result(dimension, mode, render_html=i == len(job_list) - 1, is_inference=is_inference)) + + for result in result_list[::-1]: + if result and hasattr(result, "show"): + result.show() + break + + +@click.group(name="analyze", cls=ClickAliasedGroup) +def analyze_cli(**kwargs): + """Analyze profiling datasets and give performance optimization suggestion.""" + pass + + +@analyze_cli.command(context_settings=CONTEXT_SETTINGS, + name="all", + short_help='Analyze timeline, operators and graph.') +@click.option('--profiling_path', '-d', 'profiling_path', type=click.Path(), required=True, + help='path of trace_view.json in profiling') +@click.option('--benchmark_profiling_path', '-bp', 'benchmark_profiling_path', type=click.Path()) +@click.option('--cann_version', '-cv', 'cann_version', + type=click.Choice(constant.SUPPORTED_CANN_VERSION, case_sensitive=False), + default=constant.DEFAULT_CANN_VERSION, + help='The CANN software version, which can be viewed by executing the following command: ' + '"cat /usr/local/Ascend/ascend-toolkit/latest/aarch64-linux/ascend_toolkit_install.info"') +@click.option('--torch_version', '-tv', 'torch_version', + type=click.Choice(constant.SUPPORTED_TORCH_VERSION, case_sensitive=False), + default=constant.DEFAULT_TORCH_VERSION, + help='The runtime torch version, which can be detected by exec command "pip show torch"') +@click.option('--is_inference', is_flag=True) +@debug_option +def analyze_all(**kwargs) -> None: + # 当前compare_tools必须输入两个profiling路径,att-advisor有等价功能支持输入一个Profiling路径,后续替换成对应实现 + if not kwargs.get("benchmark_profiling_path"): + kwargs["benchmark_profiling_path"] = kwargs.get("profiling_path") + + _analyze(AnalysisScope.supported_dims, **kwargs) + + +@analyze_cli.command(context_settings=CONTEXT_SETTINGS, + name="communication", + short_help='Analyze timeline, operators and graph.') +@click.option('--profiling_path', '-d', 'profiling_path', type=click.Path(), required=True, + help='path of trace_view.json in profiling') +@click.option('--benchmark_profiling_path', '-bp', 'benchmark_profiling_path', type=click.Path()) +@click.option('--cann_version', '-cv', 'cann_version', + 
type=click.Choice(constant.SUPPORTED_CANN_VERSION, case_sensitive=False), + default=constant.DEFAULT_CANN_VERSION, + help='The CANN software version, which can be viewed by executing the following command: ' + '"cat /usr/local/Ascend/ascend-toolkit/latest/aarch64-linux/ascend_toolkit_install.info"') +@click.option('--torch_version', '-tv', 'torch_version', + type=click.Choice(constant.SUPPORTED_TORCH_VERSION, case_sensitive=False), + default=constant.DEFAULT_TORCH_VERSION, + help='The runtime torch version, which can be detected by exec command "pip show torch"') +@click.option('--mode', '-m', 'mode', default=None) +@click.option('--is_inference', is_flag=True) +@debug_option +def analyze_communication(**kwargs) -> None: + _analyze(["communication"], **kwargs) + + +@analyze_cli.command(context_settings=CONTEXT_SETTINGS, + name="scheduling", + short_help='Analyze timeline, operators and graph.') +@click.option('--profiling_path', '-d', 'profiling_path', type=click.Path(), required=True, + help='path of trace_view.json in profiling') +@click.option('--benchmark_profiling_path', '-bp', 'benchmark_profiling_path', type=click.Path()) +@click.option('--cann_version', '-cv', 'cann_version', + type=click.Choice(constant.SUPPORTED_CANN_VERSION, case_sensitive=False), + default=constant.DEFAULT_CANN_VERSION, + help='The CANN software version, which can be viewed by executing the following command: ' + '"cat /usr/local/Ascend/ascend-toolkit/latest/aarch64-linux/ascend_toolkit_install.info"') +@click.option('--torch_version', '-tv', 'torch_version', + type=click.Choice(constant.SUPPORTED_TORCH_VERSION, case_sensitive=False), + default=constant.DEFAULT_TORCH_VERSION, + help='The runtime torch version, which can be detected by exec command "pip show torch"') +@click.option('--mode', '-m', 'mode', default=None) +@click.option('--is_inference', is_flag=True) +@debug_option +def analyze_scheduling(**kwargs) -> None: + _analyze(["scheduling"], **kwargs) + + +@analyze_cli.command(context_settings=CONTEXT_SETTINGS, + name="computing", + short_help='Analyze timeline, operators and graph.') +@click.option('--profiling_path', '-d', 'profiling_path', type=click.Path(), required=True, + help='path of trace_view.json in profiling') +@click.option('--benchmark_profiling_path', '-bp', 'benchmark_profiling_path', type=click.Path()) +@click.option('--cann_version', '-cv', 'cann_version', + type=click.Choice(constant.SUPPORTED_CANN_VERSION, case_sensitive=False), + default=constant.DEFAULT_CANN_VERSION, + help='The CANN software version, which can be viewed by executing the following command: ' + '"cat /usr/local/Ascend/ascend-toolkit/latest/aarch64-linux/ascend_toolkit_install.info"') +@click.option('--torch_version', '-tv', 'torch_version', + type=click.Choice(constant.SUPPORTED_TORCH_VERSION, case_sensitive=False), + default=constant.DEFAULT_TORCH_VERSION, + help='The runtime torch version, which can be detected by exec command "pip show torch"') +@click.option('--mode', '-m', 'mode', default=None) +@click.option('--is_inference', is_flag=True) +@debug_option +def analyze_computing(**kwargs) -> None: + _analyze(["computing"], **kwargs) diff --git a/profiler/cli/cluster_cli.py b/profiler/cli/cluster_cli.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/profiler/cli/compare_cli.py b/profiler/cli/compare_cli.py new file mode 100644 index 0000000000..a4e69653f2 --- /dev/null +++ b/profiler/cli/compare_cli.py @@ -0,0 +1,38 @@ +import ast +import click +import os +import sys + 
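+# Make compare_backend (shipped under profiler/compare_tools) importable when this CLI is run from a source tree.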
+sys.path.append(os.path.dirname(os.path.dirname(__file__))) + +from profiler.advisor.utils.tools import CONTEXT_SETTINGS, ClickAliasedGroup +from profiler.advisor.utils.utils import debug_option +from profiler.advisor.common.timeline.event import AdvisorDict +from profiler.compare_tools.compare_backend.comparison_generator import ComparisonGenerator + + +@click.group(name="compare", cls=ClickAliasedGroup) +def compare_cli(**kwargs): + """Query operator details from timeline.""" + pass + + +@compare_cli.command(context_settings=CONTEXT_SETTINGS, + name="profiling", + short_help='Analyze timeline for specific operator and report detail code stacks.') +@click.option('--profiling_path', '-d', 'base_profiling_path', type=click.Path(), required=True, + help='path of trace_view.json in profiling') +@click.option('--benchmark_profiling_path', '-bp', 'comparison_profiling_path', type=click.Path()) +@click.option('--enable_profiling_compare', is_flag=True) +@click.option('--enable_operator_compare', is_flag=True) +@click.option('--enable_memory_compare', is_flag=True) +@click.option('--enable_communication_compare', is_flag=True) +@click.option('--output_path', '-o', 'output_path', type=click.Path()) +@click.option('--max_kernel_num', 'max_kernel_num', type=int, help="每个torch op的kernel数量限制") +@click.option('--op_name_map', type=dict, default={}, help="配置GPU与NPU等价的算子名称映射关系,以字典的形式传入", required=False) +@click.option('--use_input_shape', is_flag=True) +@click.option('--gpu_flow_cat', type=str, default='', help="gpu flow event的分类标识") +@debug_option +def compare_profiling(**kwargs) -> None: + args = AdvisorDict(kwargs) + ComparisonGenerator(args).run() diff --git a/profiler/cli/complete_cli.py b/profiler/cli/complete_cli.py new file mode 100644 index 0000000000..e4fa0caf3f --- /dev/null +++ b/profiler/cli/complete_cli.py @@ -0,0 +1,29 @@ +import click + +from profiler.advisor.utils.tools import CONTEXT_SETTINGS + + +@click.command(context_settings=CONTEXT_SETTINGS, + short_help='Auto complete ma-advisor command in terminal, support "bash(default)/zsh/fish".') +@click.argument('shell_type', nargs=1, default="Bash", type=click.Choice(["Bash", "Zsh", "Fish"], case_sensitive=False)) +def auto_complete_cli(shell_type): + """ + Auto complete ma-advisor command in terminal. 
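+ This command only prints the activation snippet; paste it into the current shell session or your shell profile to enable completion.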
+ + Example: + + \b + # print bash auto complete command to terminal + ma-advisor auto-completion Bash + """ + click.echo("Tips: please paste following shell command to your terminal to activate auto completion.\n") + if shell_type.lower() == "bash": + bash_str = 'eval "$(_advisor_COMPLETE=bash_source ma-advisor)"' + elif shell_type.lower() == "zsh": + bash_str = 'eval "$(_advisor_COMPLETE=zsh_source ma-advisor)"' + elif shell_type.lower() == "fish": + bash_str = 'eval (env _advisor_COMPLETE=fish_source ma-advisor)' + else: + click.echo(f'Unsupported shell type {shell_type}.') + return + click.echo(f'{bash_str}\n') diff --git a/profiler/cli/entrance.py b/profiler/cli/entrance.py new file mode 100644 index 0000000000..b14d3dfd86 --- /dev/null +++ b/profiler/cli/entrance.py @@ -0,0 +1,59 @@ +#!/usr/bin/python +# -*- coding: utf-8 -*- +import logging +import click + +from profiler.cli.analyze_cli import analyze_cli +from profiler.cli.complete_cli import auto_complete_cli +from profiler.cli.compare_cli import compare_cli + +from profiler.advisor.version import print_version_callback, cli_version + +logger = logging.getLogger() +CONTEXT_SETTINGS = dict(help_option_names=['-H', '-h', '--help'], + max_content_width=160) + +COMMAND_PRIORITY = { + "analyze": 1, + "query": 2, + "env": 3, + "auto-completion": 4 +} + + +class SpecialHelpOrder(click.Group): + + def __init__(self, *args, **kwargs): + super(SpecialHelpOrder, self).__init__(*args, **kwargs) + + def list_commands_for_help(self, ctx): + """ + reorder the list of commands when listing the help + """ + commands = super(SpecialHelpOrder, self).list_commands(ctx) + return [item[1] for item in sorted((COMMAND_PRIORITY.get(command, float('INF')), + command) for command in commands)] + + def get_help(self, ctx): + self.list_commands = self.list_commands_for_help + return super(SpecialHelpOrder, self).get_help(ctx) + + +@click.group(context_settings=CONTEXT_SETTINGS, cls=SpecialHelpOrder) +@click.option('--version', '-V', '-v', is_flag=True, + callback=print_version_callback, expose_value=False, + is_eager=True, help=cli_version()) +def advisor_cli(**kwargs): + pass + + +advisor_cli.add_command(analyze_cli, name="analyze") +advisor_cli.add_command(auto_complete_cli, name="auto-completion") +advisor_cli.add_command(compare_cli, name="compare") + +if __name__ == '__main__': + advisor_cli.main( + ["analyze", "scheduling", "-d", + r"/home/ma-user/work/profiling", + ] + ) diff --git a/profiler/cli/query_cli.py b/profiler/cli/query_cli.py new file mode 100644 index 0000000000..e69de29bb2 -- Gitee From 21c339e6278b6faca5640d301a36aa5aee5c930f Mon Sep 17 00:00:00 2001 From: wuyuhan Date: Tue, 9 Apr 2024 19:34:51 +0800 Subject: [PATCH 04/21] =?UTF-8?q?profiler=E6=89=93=E5=8C=85=E6=88=90att=5F?= =?UTF-8?q?advisor-xxx.whl?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- MANIFEST.in | 5 +++++ requirements.txt | 2 ++ requirements/build.txt | 12 ++++++++++++ requirements/test.txt | 5 +++++ setup.cfg | 32 ++++++++++++++++++++++++++++++++ setup.py | 42 ++++++++++++++++++++++++++++++++++++++++++ version.txt | 1 + 7 files changed, 99 insertions(+) create mode 100644 MANIFEST.in create mode 100644 requirements.txt create mode 100644 requirements/build.txt create mode 100644 requirements/test.txt create mode 100644 setup.cfg create mode 100644 setup.py create mode 100644 version.txt diff --git a/MANIFEST.in b/MANIFEST.in new file mode 100644 index 0000000000..d86534656d --- /dev/null +++ b/MANIFEST.in @@ -0,0 +1,5 @@ 
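+# Bundle the advisor's non-Python assets (display templates, simulation display files and checker resources) into the distribution.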
+recursive-include profiler/advisor/display * +recursive-include profiler/advisor/third_party/simulation/display * +recursive-include profiler/advisor/checker * +global-exclude */__pycache__/* +global-exclude *.pyc diff --git a/requirements.txt b/requirements.txt new file mode 100644 index 0000000000..9d7eaf19f7 --- /dev/null +++ b/requirements.txt @@ -0,0 +1,2 @@ +-r requirements/build.txt +-r requirements/tests.txt \ No newline at end of file diff --git a/requirements/build.txt b/requirements/build.txt new file mode 100644 index 0000000000..2c5638eaef --- /dev/null +++ b/requirements/build.txt @@ -0,0 +1,12 @@ +click +tabulate +networkx +jinja2 +PyYaml +tqdm +prettytable +ijson +requests +xlsxwriter +sqlalchemy +urllib3<2.0 \ No newline at end of file diff --git a/requirements/test.txt b/requirements/test.txt new file mode 100644 index 0000000000..3bacb7ca55 --- /dev/null +++ b/requirements/test.txt @@ -0,0 +1,5 @@ +pytest==6.2.4 +pytest-cov==2.12.0 +pytest-mock==3.6.1 +pytest-cookies==0.6.1 +mock==4.0.3 \ No newline at end of file diff --git a/setup.cfg b/setup.cfg new file mode 100644 index 0000000000..cf9acbbc4f --- /dev/null +++ b/setup.cfg @@ -0,0 +1,32 @@ +[isort] +line_length = 120 +multi_line_output = 0 +known_standard_library = setuptools +no_lines_before = STDLIB,LOCALFOLDER +default_section = THIRDPARTY +include_trailing_comma = true +force_grid_wrap = 0 +use_parentheses = true + +[flake8] +exclude = tests/* +max-line-length = 120 + +[pycodestyle] +max-line-length = 120 +exclude = tests/* + +[yapf] +BASED_ON_STYLE = pep8 +BLANK_LINE_BEFORE_NESTED_CLASS_OR_DEF = true +SPLIT_BEFORE_EXPRESSION_AFTER_OPENING_PAREN = true +COLUMN_LIMIT = 120 + +[aliases] +test=pytest + +[mypy] +ignore_missing_imports = True + +[mypy-tests.*] +ignore_errors = True diff --git a/setup.py b/setup.py new file mode 100644 index 0000000000..8ee18763b5 --- /dev/null +++ b/setup.py @@ -0,0 +1,42 @@ +#!/usr/bin/python +# -*- coding: utf-8 -*- + +from setuptools import find_packages, setup # type: ignore + + +extras = { + "test": [ + "pytest==6.2.4", + "pytest-cookies==0.6.1", + "pytest-cov==2.12.0", + "mock==4.0.3", + ] +} + +with open('requirements/build.txt', 'r') as f: + requires = f.read().splitlines() + +with open('requirements/test.txt', 'r') as f: + tests_requires = f.read().splitlines() +tests_requires.extend(set(requires)) + +with open('version.txt', 'r') as f: + version = f.read().strip() + +setup( + name="att-advisor", + version=version, + description="Ascend advisor tools", + packages=find_packages(), + include_package_data=True, + python_requires='>=3.7', + install_requires=requires, + package_data={'': ['*.json', '*.ini', '*.txt', '*.yaml', '*.html']}, + tests_require=tests_requires, + entry_points=""" + [console_scripts] + att-advisor=profiler.cli.entrance:advisor_cli + """ +) + +# build cmd: pip install --editable . 
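The console_scripts entry above maps the att-advisor command to profiler.cli.entrance:advisor_cli. A minimal sketch for sanity-checking that wiring once the wheel is installed (illustrative only, not part of this patch; the script name is hypothetical and it assumes Python 3.8+ so importlib.metadata is available):

    # check_entry_point.py -- hypothetical helper, not shipped with this patch
    from importlib.metadata import entry_points

    eps = entry_points()
    # Python 3.10+ exposes select(); 3.8/3.9 return a dict keyed by group.
    console = eps.select(group="console_scripts") if hasattr(eps, "select") else eps.get("console_scripts", [])
    matches = [ep for ep in console if ep.name == "att-advisor"]
    if matches:
        cli = matches[0].load()  # resolves to profiler.cli.entrance:advisor_cli
        print("att-advisor ->", matches[0].value)
    else:
        print("att-advisor entry point not found; run 'pip install --editable .' first")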
diff --git a/version.txt b/version.txt new file mode 100644 index 0000000000..7bcd0e3612 --- /dev/null +++ b/version.txt @@ -0,0 +1 @@ +0.0.2 \ No newline at end of file -- Gitee From 808e677a8de8f64c29593395257ead0dd2325d36 Mon Sep 17 00:00:00 2001 From: fanxiaotong Date: Tue, 16 Apr 2024 20:37:29 +0800 Subject: [PATCH 05/21] framework --- profiler/advisor/analyzer/base_analyzer.py | 18 +++ .../analyzer/cluster/slow_rank_analyser.py | 69 ++++++++ .../fusion_ops/fusion_ops_analyzer.py | 20 +-- profiler/advisor/common/module_lib.py | 87 ---------- .../dataset/cluster/cluster_dataset.py | 152 ++++++++++++++++++ .../cluster/cluster_step_trace_time_bean.py | 67 ++++++++ profiler/advisor/interface/interface.py | 83 ++++------ profiler/cli/analyze_cli.py | 25 +-- profiler/cluster_analyse/cluster_analysis.py | 4 +- 9 files changed, 351 insertions(+), 174 deletions(-) create mode 100644 profiler/advisor/analyzer/cluster/slow_rank_analyser.py delete mode 100644 profiler/advisor/common/module_lib.py create mode 100644 profiler/advisor/dataset/cluster/cluster_dataset.py create mode 100644 profiler/advisor/dataset/cluster/cluster_step_trace_time_bean.py diff --git a/profiler/advisor/analyzer/base_analyzer.py b/profiler/advisor/analyzer/base_analyzer.py index f698865266..ff945da5cf 100644 --- a/profiler/advisor/analyzer/base_analyzer.py +++ b/profiler/advisor/analyzer/base_analyzer.py @@ -1,7 +1,17 @@ from abc import abstractmethod, ABCMeta +from profiler.advisor.display.html.render import HTMLRender +dataset_cls_list = [] class BaseAnalyzer(metaclass=ABCMeta): + def __init__(self, collection_path, dataset_cls_list, n_processes: int = 1, cann_version=None, torch_version=None, **kwargs): + self.n_processes = n_processes + self.cann_version = cann_version + self.torch_version = torch_version + self.html_render = HTMLRender() + self.collection_path = collection_path + self.kwargs = kwargs + self.event_dataset_list = self.get_dataset_dict(dataset_cls_list) @abstractmethod def optimize(self): @@ -14,3 +24,11 @@ class BaseAnalyzer(metaclass=ABCMeta): @abstractmethod def make_render(self): pass + + def get_dataset_dict(self, dataset_cls_list): + datasets = {key: [] for key in dataset_cls_list} + + for dataset_cls in dataset_cls_list: + if dataset_cls and callable(dataset_cls): + datasets[dataset_cls] = dataset_cls(self.collection_path, **self.kwargs) + return datasets diff --git a/profiler/advisor/analyzer/cluster/slow_rank_analyser.py b/profiler/advisor/analyzer/cluster/slow_rank_analyser.py new file mode 100644 index 0000000000..36a30d4d98 --- /dev/null +++ b/profiler/advisor/analyzer/cluster/slow_rank_analyser.py @@ -0,0 +1,69 @@ +# Copyright (c) 2023, Huawei Technologies Co., Ltd. +# All rights reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
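# Illustrative worked example (not part of the patch): the analyzer below flags a
# dimension when the per-rank gap ratio (max - min) / mean_total_time exceeds
# RATIO_THRESHOLD (0.05). Assuming the step trace values are in microseconds, as
# the division by 1000 before printing milliseconds suggests, per-rank Computing
# sums of [900_000, 1_000_000, 1_100_000] with a mean total step time of
# 2_000_000 give a ratio of (1_100_000 - 900_000) / 2_000_000 = 0.10 > 0.05, so
# the advice reports a Computing difference of 0.10 * 2_000_000 / 1000 = 200.0 ms.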
+ +from collections import defaultdict +from profiler.advisor.analyzer.base_analyzer import BaseAnalyzer +from profiler.advisor.dataset.cluster.cluster_dataset import ClusterStepTraceTimeDataSet + + +class SlowRankAnalyzer(BaseAnalyzer): + RANK = "rank" + RATIO_THRESHOLD = 0.05 + BOTTLENECK_LIST = ['Computing', 'Communication', "Free"] + dataset_cls_list = [ClusterStepTraceTimeDataSet] + + def __init__(self, collection_path, n_processes: int = 1, cann_version=None, torch_version=None, **kwargs): + super().__init__(collection_path, self.dataset_cls_list, n_processes, cann_version, torch_version, **kwargs) + self.step_trace_timeData = self.event_dataset_list[ClusterStepTraceTimeDataSet] + + def optimize(self, **kwargs): + step_dict = self.process() + self.output(step_dict) + return self.output_format_data + + def process(self): + step_dict = defaultdict(lambda: [0, 0, 0, 0]) + for step_bean in self.step_trace_timeData: + if step_bean.type == self.RANK: + step_dict[step_bean.index][0] += step_bean.compute + step_dict[step_bean.index][1] += step_bean.communication + step_dict[step_bean.index][2] += step_bean.free + total_time_list = [sum(data_tuple) for rank_id, data_tuple in step_dict.items()] + if total_time_list: + mean_total_time = sum(total_time_list) / len(total_time_list) + for i in range(len(self.BOTTLENECK_LIST)): + self.produce_bottleneck(step_dict, i, mean_total_time) + return step_dict + + def produce_bottleneck(self, step_dict: dict, produce_type: int, mean_total_time: float): + data_list = [data_tuple[produce_type] for rank_id, data_tuple in step_dict.items()] + max_ratio = self.compute_max_gap_ratio(data_list, mean_total_time) + if max_ratio > self.RATIO_THRESHOLD: + self.bottelneck += f'{self.BOTTLENECK_LIST[produce_type]} has some issues in the cluster, ' \ + f'because the max difference of {self.BOTTLENECK_LIST[produce_type]} time ' \ + f'has reached {round(max_ratio * mean_total_time / 1000, 3)}ms. 
\n' + + def output(self, step_dict: dict): + self.output_format_data[self.DATA] = step_dict + self.output_format_data[self.BOTTLENECK] = self.bottelneck + + def make_record(self): + """ + make record for what and how to optimize + """ + pass + + def make_render(self): + pass diff --git a/profiler/advisor/analyzer/scheduling/fusion_ops/fusion_ops_analyzer.py b/profiler/advisor/analyzer/scheduling/fusion_ops/fusion_ops_analyzer.py index 3db3ae55a1..ca10dcb1f5 100644 --- a/profiler/advisor/analyzer/scheduling/fusion_ops/fusion_ops_analyzer.py +++ b/profiler/advisor/analyzer/scheduling/fusion_ops/fusion_ops_analyzer.py @@ -11,38 +11,32 @@ from profiler.advisor.dataset.timeline_event_dataset import TimelineEventDataset from profiler.advisor.result.result import OptimizeResult from profiler.advisor.result.item import OptimizeItem, OptimizeRecord from profiler.advisor.utils.utils import format_timeline_result -from profiler.advisor.display.html.render import HTMLRender from profiler.advisor.utils.utils import init_timeline_ops_db logger = logging.getLogger() class TimelineFusionOpsAnalyzer(BaseAnalyzer): + dataset_cls_list = [TimelineEventDataset] - def __init__(self, n_processes: int = 1, cann_version=None, torch_version=None, **kwargs): - self.n_processes = n_processes + def __init__(self, collection_path, n_processes: int = 1, cann_version=None, torch_version=None, **kwargs): + super().__init__(collection_path, self.dataset_cls_list, n_processes, cann_version, torch_version, **kwargs) self._matched_op_index = {} if self.n_processes <= 1 else multiprocessing.Manager().dict() self.matched_op_stacks = {} - self.cann_version = cann_version - self.torch_version = torch_version self.empty_stacks = True - self.event_dataset = None - self.html_render = HTMLRender() self.result = OptimizeResult() + self.timeline_event_dataset = self.event_dataset_list[TimelineEventDataset] - def optimize(self, timeline_dataset, **kwargs): - - self.event_dataset = timeline_dataset - + def optimize(self, **kwargs): for mode in [const.ATEN.lower(), const.OPTIMIZER.lower()]: for op_combined, npu_apis in tqdm(getattr(init_timeline_ops_db(self.cann_version, self.torch_version), f"_{mode}_op_api_map").items(), leave=False, ncols=100, desc="Scanning timeline for affinity apis"): for npu_api in npu_apis.split("/"): - self.find_fusion_ops(self.event_dataset, op_combined, npu_api, mode) + self.find_fusion_ops(self.timeline_event_dataset, op_combined, npu_api, mode) - self.query_stack(self.event_dataset) + self.query_stack(self.timeline_event_dataset) logger.info("Finish timeline analysis") self.make_record() diff --git a/profiler/advisor/common/module_lib.py b/profiler/advisor/common/module_lib.py deleted file mode 100644 index 697e37f736..0000000000 --- a/profiler/advisor/common/module_lib.py +++ /dev/null @@ -1,87 +0,0 @@ -import logging - -from profiler.advisor.analyzer.scheduling.fusion_ops.fusion_ops_analyzer import TimelineFusionOpsAnalyzer -from profiler.advisor.analyzer.overall.overall_analyzer import OverallSummaryAnalyzer - -from profiler.advisor.dataset.timeline_event_dataset import TimelineEventDataset - -logger = logging.getLogger() - - -class AnalysisScope: - supported_dims = ["computing", "scheduling", "communication", "overall", "dataloader"] - - @staticmethod - def get_analyzer(dimension, analyzer_name, is_inference=False): - if is_inference: - return getattr(InferenceAnalysisScope, dimension)().get(analyzer_name) - return getattr(TrainAnalysisScope, dimension)().get(analyzer_name) - - @staticmethod - def 
analyzer_list(dim=None, is_inference=False): - analyzer_list = [] - dims = [dim] if dim else AnalysisScope.supported_dims - for dim in dims: - analyzer_list += list(getattr(InferenceAnalysisScope, dim)().keys()) if is_inference else list( - getattr(TrainAnalysisScope, dim)().keys()) - return analyzer_list - - -class TrainAnalysisScope(AnalysisScope): - - @staticmethod - def computing(): - return dict() - - @staticmethod - def scheduling(): - return dict( - timeline_fusion_ops=TimelineFusionOpsAnalyzer - ) - - @staticmethod - def communication(): - return dict() - - @staticmethod - def overall(): - return dict( - overall_summary=OverallSummaryAnalyzer - ) - - @staticmethod - def dataloader(): - return dict() - - -class InferenceAnalysisScope(AnalysisScope): - @staticmethod - def computing(): - return dict() - - @staticmethod - def scheduling(): - return dict() - - @staticmethod - def communication(): - return dict() - - @staticmethod - def overall(): - return dict() - - @staticmethod - def dataloader(): - return dict() - - -class AnalyzerToDataset: - analyzer_to_dataset = { - "overall_summary": [], - "timeline_fusion_ops": [TimelineEventDataset] - } - - @staticmethod - def get_dataset(analyzer_name): - return AnalyzerToDataset.analyzer_to_dataset.get(analyzer_name) diff --git a/profiler/advisor/dataset/cluster/cluster_dataset.py b/profiler/advisor/dataset/cluster/cluster_dataset.py new file mode 100644 index 0000000000..40579e858c --- /dev/null +++ b/profiler/advisor/dataset/cluster/cluster_dataset.py @@ -0,0 +1,152 @@ +import logging + +import os + +import profiler.advisor.dataset.cluster.cluster_step_trace_time_bean +from profiler.advisor.utils.utils import singleton +from profiler.cluster_analyse.common_func.file_manager import FileManager +from profiler.cluster_analyse.common_func.constant import Constant +from collections import defaultdict +from profiler.cluster_analyse.cluster_analysis import ClusterAnalysis +from profiler.advisor.dataset.cluster.cluster_step_trace_time_bean import ClusterStepTraceTimeBean + +logger = logging.getLogger() + + +class ClusterDataset: + + def __init__(self, collection_path, **kwargs) -> None: + self.collection_path = os.path.realpath(collection_path) + if not self.is_cluster_analysis_output_exist(): + self.cluster_analyze() + + def is_cluster_analysis_output_exist(self): + """ + check whether input path is valid + """ + for file in os.listdir(self.collection_path): + if file == 'cluster_analysis_output': + print("[INFO]Cluster has been analyzed " + "because of the existence of cluster analysis output directory.") + print("[INFO]Skip Cluster analyze backend.") + return True + return False + + def cluster_analyze(self): + parameter = { + Constant.COLLECTION_PATH: self.collection_path, + Constant.ANALYSIS_MODE: "all" + } + print("[INFO] cluster analysis is in the process, please wait...") + try: + ClusterAnalysis(parameter).run() + except Exception as e: + raise ValueError(f"Cluster analyze backend failed:{e}") from e + + def load_csv_data(self, file_name, dataBean): + csv_path = os.path.join(self.collection_path, Constant.CLUSTER_ANALYSIS_OUTPUT, file_name) + if not os.path.exists(csv_path): + msg = "[ERROR] cluster_step_trace_time.csv doesn't exist, terminate analysis." 
+ raise RuntimeError(msg) + data = FileManager.read_csv_file(csv_path, dataBean) + return data + + def load_json_data(self, file_name): + json_path = os.path.join(self.collection_path, Constant.CLUSTER_ANALYSIS_OUTPUT, file_name) + if not os.path.exists(json_path): + msg = "[ERROR] cluster_communication.json doesn't exist, terminate analysis." + raise RuntimeError(msg) + data = FileManager.read_json_file(json_path) + return data + + +@singleton +class ClusterStepTraceTimeDataSet(ClusterDataset): + RANK = "rank" + + def __init__(self, collection_path: str, kwargs: dict = None): + super().__init__(collection_path) + self._step_dict = defaultdict() + + def parse(self): + self.path_check() + step_data = self.load_csv_data(Constant.CLUSTER_STEP_TIME_CSV, ClusterStepTraceTimeBean) + self.step_dict = self.formate_data(step_data) + + def formate_data(self, step_data: list): + step_dict = defaultdict(lambda: [0, 0, 0, 0]) + for step_bean in step_data: + if step_bean.type == self.RANK: + step_dict[step_bean.index][0] += step_bean.compute + step_dict[step_bean.index][1] += step_bean.communication + step_dict[step_bean.index][2] += step_bean.free + return step_dict + + def get_data(self): + return self._step_dict + + +@singleton +class ClusterCommunicationDataSet(ClusterDataset): + RDMA_TIME_MS = "RDMA time(ms)" + RDMA_SIZE_MB = "RDMA size(mb)" + SDMA_TIME_MS = "SDMA time(ms)" + SDMA_SIZE_MB = "SDMA size(mb)" + RDMA_BANDWIDTH = "RDMA bandwidth(GB/s)" + SDMA_BANDWIDTH = "SDMA bandwidth(GB/s)" + COMMUNICATION_BANDWIDTH_INFO = "Communication Bandwidth Info" + TRANSIT_TIME = "Transit Time(ms)" + TRANSIT_SIZE = "Transit Size(MB)" + SDMA = "SDMA" + RDMA = "RDMA" + + def __init__(self, collection_path: str, kwargs: dict = None): + super().__init__(collection_path) + self.rank_bw_dict = defaultdict(lambda: { + self.RDMA_TIME_MS: 0, + self.RDMA_SIZE_MB: 0, + self.SDMA_TIME_MS: 0, + self.SDMA_SIZE_MB: 0, + }) + + @staticmethod + def compute_ratio(dividend: float, divisor: float): + if abs(divisor) < 1e-15: + return 0 + else: + return round(dividend / divisor, 4) + + def parse(self): + self.path_check() + communication_json = self.load_json_data() + self.process(communication_json) + + def process(self, communication_json: dict): + for comm_group, group_dict in communication_json.items(): + for step, step_dict in group_dict.items(): + for op, op_dict in step_dict.items(): + self.compute_bandwidth(op_dict) + + def compute_bandwidth(self, op_dict: dict): + for rank_id, rank_dict in op_dict.items(): + try: + rank = int(rank_id) + except ValueError as e: + msg = "[ERROR] Cluster_communication.json has invalid structure." 
+ raise ValueError(msg) from e + for comm_type, bw_dict in rank_dict.get(self.COMMUNICATION_BANDWIDTH_INFO, {}).items(): + if comm_type == self.SDMA: + self.rank_bw_dict[rank][self.SDMA_SIZE_MB] += bw_dict.get(self.TRANSIT_SIZE) + self.rank_bw_dict[rank][self.SDMA_TIME_MS] += bw_dict.get(self.TRANSIT_TIME) + if comm_type == self.RDMA: + self.rank_bw_dict[rank][self.RDMA_SIZE_MB] += bw_dict.get(self.TRANSIT_SIZE) + self.rank_bw_dict[rank][self.RDMA_TIME_MS] += bw_dict.get(self.TRANSIT_TIME) + + for rank, rank_dict in self.rank_bw_dict.items(): + self.rank_bw_dict[rank][self.RDMA_BANDWIDTH] = self.compute_ratio( + self.rank_bw_dict[rank][self.RDMA_SIZE_MB], self.rank_bw_dict[rank][self.RDMA_TIME_MS]) + self.rank_bw_dict[rank][self.SDMA_BANDWIDTH] = self.compute_ratio( + self.rank_bw_dict[rank][self.SDMA_SIZE_MB], self.rank_bw_dict[rank][self.SDMA_TIME_MS]) + + def get_data(self): + return self.rank_bw_dict diff --git a/profiler/advisor/dataset/cluster/cluster_step_trace_time_bean.py b/profiler/advisor/dataset/cluster/cluster_step_trace_time_bean.py new file mode 100644 index 0000000000..b108fc77a3 --- /dev/null +++ b/profiler/advisor/dataset/cluster/cluster_step_trace_time_bean.py @@ -0,0 +1,67 @@ +# Copyright (c) 2023, Huawei Technologies Co., Ltd. +# All rights reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + + +class ClusterStepTraceTimeBean: + STEP = "Step" + TYPE = "Type" + INDEX = "Index" + COMPUTING = "Computing" + COMMUNICATION = "Communication(Not Overlapped)" + FREE = "Free" + + def __init__(self, data: dict): + self._data = data + + @property + def step(self) -> str: + return self._data.get(self.STEP, '') + + @property + def type(self) -> str: + return self._data.get(self.TYPE, '') + + @property + def index(self) -> int: + try: + return int(self._data.get(self.INDEX)) + except ValueError as e: + msg = "[ERROR] Cluster step trace time.csv has invalid value in column 'Index'." + raise ValueError(msg) from e + + @property + def compute(self) -> float: + try: + return float(self._data.get(self.COMPUTING, '')) + except ValueError as e: + msg = "[ERROR] Cluster step trace time.csv has invalid value in column 'Computing'." + raise ValueError(msg) from e + + @property + def communication(self) -> float: + try: + return float(self._data.get(self.COMMUNICATION, '')) + except ValueError as e: + msg = "[ERROR] Cluster step trace time.csv has invalid value in column 'Communication'." + raise ValueError(msg) from e + + @property + def free(self) -> float: + try: + return float(self._data.get(self.FREE, '')) + except ValueError as e: + msg = "[ERROR] Cluster step trace time.csv has invalid value in column 'Free'." 
+ raise ValueError(msg) from e + diff --git a/profiler/advisor/interface/interface.py b/profiler/advisor/interface/interface.py index 4a2eae244a..54e7dcdc3c 100644 --- a/profiler/advisor/interface/interface.py +++ b/profiler/advisor/interface/interface.py @@ -1,66 +1,47 @@ import os -from profiler.advisor.common.module_lib import AnalysisScope, AnalyzerToDataset +from profiler.advisor.analyzer.scheduling.fusion_ops.fusion_ops_analyzer import TimelineFusionOpsAnalyzer +from profiler.advisor.analyzer.overall.overall_analyzer import OverallSummaryAnalyzer +from profiler.advisor.dataset.timeline_event_dataset import TimelineEventDataset from profiler.advisor.utils.utils import Timer +from profiler.advisor.result.result import OptimizeResult +from profiler.advisor.analyzer.cluster.slow_rank_analyser import SlowRankAnalyzer class Interface: + supported_analysiser = { + "computing": [], + "scheduling": [TimelineFusionOpsAnalyzer], + "communication": [], + "overall": [], + "dataloader": [], + "cluster": [SlowRankAnalyzer] + } + + all_dimension = supported_analysiser.keys() + def __init__(self, **kwargs): self.collection_path = os.path.realpath(kwargs.get("profiling_path")) - self._analyzer_controller = AnalyzerController(**kwargs) - self._dataset_controller = DatasetController(collection_path=self.collection_path, **kwargs) - def get_result(self: any, dimension: str, mode: str=None, render_html=False, **kwargs): + @staticmethod + def get_analyzer(dimension, is_inference=False): + return Interface.supported_analysiser.get(dimension, []) + + def get_result(self: any, dimension: str, render_html=False, **kwargs): """ :Param mode: affinity apis, ai cpu and so on. """ - analyzer = self._analyzer_controller.create_analyzer(dimension, mode, kwargs.get("is_inference", False)) - - datasets = self._dataset_controller.create_dataset(mode) - if not analyzer: - return - - if datasets: - result = analyzer.optimize(*datasets) - else: - result = analyzer.optimize() - - if render_html: - if hasattr(analyzer, "html_render"): - analyzer.html_render.render_html() - analyzer.html_render.save_to_file(f'att_advisor_{Timer().strftime}.html') - - return result - - -class AnalyzerController: - - def __init__(self, **kwargs): - self.temp_input_path = None - self.kwargs = kwargs - - def create_analyzer(self, dimension, mode: str, is_inference=False): - clss = AnalysisScope.get_analyzer(dimension, mode, is_inference) - if clss and callable(clss): - return clss(**self.kwargs) - return None - - -class DatasetController: - - def __init__(self, **kwargs): - self.collection_path = kwargs.get("collection_path") - self.kwargs = kwargs - - def create_dataset(self, mode): - dataset_cls_list = AnalyzerToDataset.get_dataset(mode) - - datasets = [] - for dataset_cls in dataset_cls_list: - if dataset_cls and callable(dataset_cls): - datasets.append(dataset_cls(self.collection_path, **self.kwargs)) - - return datasets + result_list = [] + analysiser_list = self.get_analyzer(dimension, kwargs.get("is_inference", False)) + for clss in analysiser_list: + if clss and callable(clss): + analysiser = clss(collection_path = self.collection_path, render_html=render_html, **kwargs) + result_list.append(analysiser.optimize()) + if render_html: + if hasattr(analysiser, "html_render"): + analysiser.html_render.render_html() + analysiser.html_render.save_to_file(f'att_advisor_{Timer().strftime}.html') + return result_list if __name__ == "__main__": diff --git a/profiler/cli/analyze_cli.py b/profiler/cli/analyze_cli.py index 34215d12d1..2efecffcb7 100644 
--- a/profiler/cli/analyze_cli.py +++ b/profiler/cli/analyze_cli.py @@ -9,38 +9,21 @@ sys.path.append(os.path.join(os.path.dirname(os.path.dirname(__file__)), "cluste from profiler.advisor.utils.tools import CONTEXT_SETTINGS, ClickAliasedGroup from profiler.advisor.common import constant from profiler.advisor.utils.utils import debug_option -from profiler.advisor.common.module_lib import AnalysisScope from profiler.advisor.interface.interface import Interface logger = logging.getLogger() def _analyze(dimensions, **kwargs): - is_inference = kwargs.get("is_inference", False) - user_input_mode = kwargs.get("mode") result_list = [] job_list = [] for dimension in dimensions: - - valid_modes = AnalysisScope.analyzer_list(dimension, is_inference) - if not valid_modes: - logger.info("Skip analysis of dimension %s, no analyzer", dimension) - continue - if user_input_mode and user_input_mode not in valid_modes: - logger.error("Got error mode %s for analysis dimension %s, optionals are %s", user_input_mode, dimension, - valid_modes) - continue - - analysis_modes = [user_input_mode] if user_input_mode else valid_modes - - for mode in analysis_modes: interface = Interface(**kwargs) - job_list.append((dimension, mode, interface)) + job_list.append((dimension, interface)) - for i, (dimension, mode, interface) in enumerate(job_list[::-1]): - result_list.append( - interface.get_result(dimension, mode, render_html=i == len(job_list) - 1, is_inference=is_inference)) + for i, (dimension, interface) in enumerate(job_list[::-1]): + result_list += interface.get_result(dimension, render_html=i == len(job_list) - 1) for result in result_list[::-1]: if result and hasattr(result, "show"): @@ -76,7 +59,7 @@ def analyze_all(**kwargs) -> None: if not kwargs.get("benchmark_profiling_path"): kwargs["benchmark_profiling_path"] = kwargs.get("profiling_path") - _analyze(AnalysisScope.supported_dims, **kwargs) + _analyze(Interface.all_dimension, **kwargs) @analyze_cli.command(context_settings=CONTEXT_SETTINGS, diff --git a/profiler/cluster_analyse/cluster_analysis.py b/profiler/cluster_analyse/cluster_analysis.py index 2445462211..fd127fdc03 100644 --- a/profiler/cluster_analyse/cluster_analysis.py +++ b/profiler/cluster_analyse/cluster_analysis.py @@ -25,7 +25,7 @@ from common_func.path_manager import PathManager from analysis.analysis_facade import AnalysisFacade -class Interface: +class ClusterAnalysis: ASCEND_PT = "ascend_pt" ASCEND_MS = "ascend_ms" @@ -88,4 +88,4 @@ if __name__ == "__main__": Constant.COLLECTION_PATH: args_parsed.collection_path, Constant.ANALYSIS_MODE: args_parsed.mode } - Interface(parameter).run() + ClusterAnalysis(parameter).run() -- Gitee From d251c21d769d7db97b71d8d4026b311aa087f799 Mon Sep 17 00:00:00 2001 From: zhaolei Date: Thu, 25 Apr 2024 09:58:17 +0800 Subject: [PATCH 06/21] =?UTF-8?q?att=20advisor=20html=E5=AE=9E=E7=8E=B0?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .../html/templates/cluster_analysis.html | 49 +++++++++++++++++++ .../html/templates/compute_analysis.html | 29 +++++++++++ .../advisor/display/html/templates/main.html | 3 +- .../html/templates/timeline_analysis.html | 34 +++++++++++++ 4 files changed, 114 insertions(+), 1 deletion(-) create mode 100644 profiler/advisor/display/html/templates/cluster_analysis.html create mode 100644 profiler/advisor/display/html/templates/compute_analysis.html create mode 100644 profiler/advisor/display/html/templates/timeline_analysis.html diff --git 
a/profiler/advisor/display/html/templates/cluster_analysis.html b/profiler/advisor/display/html/templates/cluster_analysis.html new file mode 100644 index 0000000000..32379d56fc --- /dev/null +++ b/profiler/advisor/display/html/templates/cluster_analysis.html @@ -0,0 +1,49 @@ +
+

{{title|safe}}

+
+
+ + {% if result.get("Description") %} +
Description
+ + {% endif %} + + {% if result.get("Suggestion") %} +
Suggestion
+ + {% endif %} + + {% if result.get("details") %} +
details
+
+ {% for item in result.get("details") %} + + + {% for header in item.get("headers") %} + + {% endfor %} + + {% for row in item.get("data") %} + + {% for element in row %} + {% if element is number %} + + {% else %} + + {% endif %} + {% endfor %} + + {% endfor %} +
{{ header }}
{{ element|round(2) }}{{ element }}
+ {% endfor %} +
+ {% endif %} + +
+ +
+
\ No newline at end of file diff --git a/profiler/advisor/display/html/templates/compute_analysis.html b/profiler/advisor/display/html/templates/compute_analysis.html new file mode 100644 index 0000000000..e1907c091b --- /dev/null +++ b/profiler/advisor/display/html/templates/compute_analysis.html @@ -0,0 +1,29 @@ +
+

Abnormal Performance Operator

+
+ {{table.get("title")}} + + + + {% for header in table.get("headers") %} + + {% endfor %} + + {% for row in table.get("rows") %} + + {% for element in row %} + {% if element is number %} + + {% else %} + + {% endif %} + {% endfor %} + + {% endfor %} +
{{ header }}
{{ element|round(2) }}{{ element }}
+ {% if call_stack %} + call stack:
+ {{call_stack}} + {% endif %} +
+
\ No newline at end of file diff --git a/profiler/advisor/display/html/templates/main.html b/profiler/advisor/display/html/templates/main.html index 1a9392d2b2..f1703c7d8c 100644 --- a/profiler/advisor/display/html/templates/main.html +++ b/profiler/advisor/display/html/templates/main.html @@ -72,7 +72,7 @@ table { width: 100%; - table-layout: fixed; + table-layout: auto; border-collapse: collapse; margin-top: 2px; margin-bottom: 5px; @@ -82,6 +82,7 @@ padding: 10px; word-wrap: break-word; word-break: break-all; + white-space: nowrap; border: 1px solid rgb(170, 169, 169); text-align: left; } diff --git a/profiler/advisor/display/html/templates/timeline_analysis.html b/profiler/advisor/display/html/templates/timeline_analysis.html new file mode 100644 index 0000000000..b5ea891242 --- /dev/null +++ b/profiler/advisor/display/html/templates/timeline_analysis.html @@ -0,0 +1,34 @@ +
+

{{title|safe}}

+
+
+
+ {% if result.get("img") %} +
+ Image +
+ {% endif %} + + {% if result.get("current") %} + + {% endif %} + + {% if result.get("bottlenect") %} + + {% endif %} + + {% if result.get("advice") %} + + {% endif %} + +
+
+
+
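The three templates added in this patch are rendered from plain dicts passed to HTMLRender.render_template as "result". A rough sketch of the payload cluster_analysis.html consumes is shown below; the keys mirror the template lookups (Description, Suggestion, and details with headers and data), while the values are made-up placeholders rather than real profiling output.

# Illustrative payload for cluster_analysis.html (placeholder values only)
result_for_html = {
    "Description": "Computing has some issues in the cluster ...",
    "Suggestion": "",
    "details": [
        {
            "headers": ["rank_id", "compute", "communication", "free"],
            "data": [
                [0, 1234.5, 678.9, 42.0],
                [1, 1198.7, 701.2, 55.3],
            ],
        }
    ],
}

Each entry in details becomes one table: headers fills the header row, and every row in data is emitted cell by cell, with numeric cells rounded to two decimal places and all other cells printed as-is.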
-- Gitee From e2e693e2fb2fd4642bd3bef59f5e91584c2033af Mon Sep 17 00:00:00 2001 From: fanxiaotong Date: Tue, 16 Apr 2024 20:37:29 +0800 Subject: [PATCH 07/21] framework --- profiler/advisor/analyzer/base_analyzer.py | 18 +++ .../analyzer/cluster/slow_link_analyser.py | 104 ++++++++++++ .../analyzer/cluster/slow_rank_analyser.py | 101 ++++++++++++ .../fusion_ops/fusion_ops_analyzer.py | 20 +-- profiler/advisor/common/constant.py | 8 + profiler/advisor/common/module_lib.py | 87 ---------- .../dataset/cluster/cluster_dataset.py | 152 ++++++++++++++++++ .../cluster/cluster_step_trace_time_bean.py | 67 ++++++++ profiler/advisor/interface/interface.py | 84 ++++------ profiler/cli/analyze_cli.py | 25 +-- profiler/cluster_analyse/cluster_analysis.py | 4 +- 11 files changed, 496 insertions(+), 174 deletions(-) create mode 100644 profiler/advisor/analyzer/cluster/slow_link_analyser.py create mode 100644 profiler/advisor/analyzer/cluster/slow_rank_analyser.py delete mode 100644 profiler/advisor/common/module_lib.py create mode 100644 profiler/advisor/dataset/cluster/cluster_dataset.py create mode 100644 profiler/advisor/dataset/cluster/cluster_step_trace_time_bean.py diff --git a/profiler/advisor/analyzer/base_analyzer.py b/profiler/advisor/analyzer/base_analyzer.py index f698865266..ff945da5cf 100644 --- a/profiler/advisor/analyzer/base_analyzer.py +++ b/profiler/advisor/analyzer/base_analyzer.py @@ -1,7 +1,17 @@ from abc import abstractmethod, ABCMeta +from profiler.advisor.display.html.render import HTMLRender +dataset_cls_list = [] class BaseAnalyzer(metaclass=ABCMeta): + def __init__(self, collection_path, dataset_cls_list, n_processes: int = 1, cann_version=None, torch_version=None, **kwargs): + self.n_processes = n_processes + self.cann_version = cann_version + self.torch_version = torch_version + self.html_render = HTMLRender() + self.collection_path = collection_path + self.kwargs = kwargs + self.event_dataset_list = self.get_dataset_dict(dataset_cls_list) @abstractmethod def optimize(self): @@ -14,3 +24,11 @@ class BaseAnalyzer(metaclass=ABCMeta): @abstractmethod def make_render(self): pass + + def get_dataset_dict(self, dataset_cls_list): + datasets = {key: [] for key in dataset_cls_list} + + for dataset_cls in dataset_cls_list: + if dataset_cls and callable(dataset_cls): + datasets[dataset_cls] = dataset_cls(self.collection_path, **self.kwargs) + return datasets diff --git a/profiler/advisor/analyzer/cluster/slow_link_analyser.py b/profiler/advisor/analyzer/cluster/slow_link_analyser.py new file mode 100644 index 0000000000..d4212ada2c --- /dev/null +++ b/profiler/advisor/analyzer/cluster/slow_link_analyser.py @@ -0,0 +1,104 @@ +# Copyright (c) 2023, Huawei Technologies Co., Ltd. +# All rights reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
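# Illustrative worked example (not part of the patch): SlowLinkAnalyzer below reads
# per-rank bandwidths that ClusterCommunicationDataSet derives by dividing the
# accumulated transit size (MB) by transit time (ms) via compute_ratio. Under those
# labels 1 MB/ms corresponds to 1 GB/s, so 512 MB moved in 64 ms gives
# compute_ratio(512, 64) == 8.0, reported as 8.0 GB/s; produce_bottleneck then
# reports the average, maximum, minimum and spread of these values per link type.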
+ +from collections import defaultdict +from profiler.advisor.analyzer.base_analyzer import BaseAnalyzer +from profiler.advisor.result.result import OptimizeResult +from profiler.advisor.result.item import OptimizeItem, OptimizeRecord +from profiler.advisor.dataset.cluster.cluster_dataset import ClusterCommunicationDataSet + + +class SlowLinkAnalyzer(BaseAnalyzer): + RDMA_TIME_MS = "RDMA time(ms)" + RDMA_SIZE_MB = "RDMA size(mb)" + SDMA_TIME_MS = "SDMA time(ms)" + SDMA_SIZE_MB = "SDMA size(mb)" + RDMA_BANDWIDTH = "RDMA bandwidth(GB/s)" + SDMA_BANDWIDTH = "SDMA bandwidth(GB/s)" + COMMUNICATION_BANDWIDTH_INFO = "Communication Bandwidth Info" + TRANSIT_TIME = "Transit Time(ms)" + TRANSIT_SIZE = "Transit Size(MB)" + SDMA = "SDMA" + RDMA = "RDMA" + SLOW_LINK_ANALYSIS = "slow_link_analysis" + dataset_cls_list = [ClusterCommunicationDataSet] + + def __init__(self, collection_path, n_processes: int = 1, cann_version=None, torch_version=None, **kwargs): + super().__init__(collection_path, self.dataset_cls_list, n_processes, cann_version, torch_version, **kwargs) + self.communication_data_class = self.event_dataset_list[ClusterCommunicationDataSet] + self.rank_bw_dict = self.communication_data_class.get_data() + self.result = OptimizeResult() + self.bottelneck = '' + self.suggestion = '' + + def optimize(self, **kwargs): + self.process() + self.make_record() + self.make_render() + return self.result + + def process(self): + if self.rank_bw_dict: + self.produce_bottleneck(self.RDMA_BANDWIDTH) + self.produce_bottleneck(self.SDMA_BANDWIDTH) + + def produce_bottleneck(self, link_type: str): + data_list = [rank_dict.get(link_type, 0) for rank_id, rank_dict in self.rank_bw_dict.items()] + avg_bw = round(sum(data_list) / len(data_list), 3) + if avg_bw == 0: + return + self.bottelneck += f'{link_type}: \n' \ + f'The average is {avg_bw}, ' \ + f'while the maximum is {round(max(data_list), 3)}GB/s and ' \ + f'the minimum is {round(min(data_list), 3)}GB/s. ' \ + f'the difference is {round(max(data_list) - min(data_list), 3)}GB/s. \n' + + def format_details(self): + details_dict = {} + headers = ['rank_id'] + list(self.rank_bw_dict[0].keys()) + data_list = [] + for rank_id, rank_bw in self.rank_bw_dict.items(): + data_list.append([rank_id] + list(rank_bw.keys())) + + details_dict["headers"] = headers + details_dict["data"] = data_list + + return [details_dict] + + def make_record(self): + """ + make record for what and how to optimize + """ + optimization_item = OptimizeItem( + SlowLinkAnalyzer.SLOW_LINK_ANALYSIS, + self.bottelneck, + [""] + ) + self.result.add(OptimizeRecord(optimization_item)) + + def make_render(self): + result_for_html = { + "Description" : self.bottelneck, + "suggestion" : self.suggestion, + "details" : self.format_details() + } + + self.html_render.render_template(key="cluster", + title=SlowLinkAnalyzer.SLOW_LINK_ANALYSIS, + template_dir="templates", + template_name="cluster_analysis.html", + cann_version=self.cann_version, + torch_version=self.torch_version, + result=result_for_html) \ No newline at end of file diff --git a/profiler/advisor/analyzer/cluster/slow_rank_analyser.py b/profiler/advisor/analyzer/cluster/slow_rank_analyser.py new file mode 100644 index 0000000000..35b4663d38 --- /dev/null +++ b/profiler/advisor/analyzer/cluster/slow_rank_analyser.py @@ -0,0 +1,101 @@ +# Copyright (c) 2023, Huawei Technologies Co., Ltd. +# All rights reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from collections import defaultdict +from profiler.advisor.analyzer.base_analyzer import BaseAnalyzer +from profiler.advisor.result.result import OptimizeResult +from profiler.advisor.result.item import OptimizeItem, OptimizeRecord +from profiler.advisor.dataset.cluster.cluster_dataset import ClusterStepTraceTimeDataSet + + +class SlowRankAnalyzer(BaseAnalyzer): + SLOW_RANK_ANALYSIS = "slow_rank_analysis" + RANK = "rank" + RATIO_THRESHOLD = 0.05 + BOTTLENECK_LIST = ['Computing', 'Communication', "Free"] + dataset_cls_list = [ClusterStepTraceTimeDataSet] + + def __init__(self, collection_path, n_processes: int = 1, cann_version=None, torch_version=None, **kwargs): + super().__init__(collection_path, self.dataset_cls_list, n_processes, cann_version, torch_version, **kwargs) + self.step_trace_class = self.event_dataset_list[ClusterStepTraceTimeDataSet] + self.step_trace_dict = self.step_trace_class.get_data() + self.result = OptimizeResult() + self.bottelneck = '' + self.suggestion = '' + + def optimize(self, **kwargs): + self.process() + + self.make_record() + self.make_render() + return self.result + + def process(self): + total_time_list = [sum(data_tuple) for rank_id, data_tuple in self.step_trace_dict.items()] + if total_time_list: + mean_total_time = sum(total_time_list) / len(total_time_list) + for i in range(len(self.BOTTLENECK_LIST)): + self.produce_bottleneck(self.step_trace_dict, i, mean_total_time) + + def produce_bottleneck(self, step_dict: dict, produce_type: int, mean_total_time: float): + data_list = [data_tuple[produce_type] for rank_id, data_tuple in step_dict.items()] + max_ratio = self.compute_max_gap_ratio(data_list, mean_total_time) + if max_ratio > self.RATIO_THRESHOLD: + self.bottelneck += f'{self.BOTTLENECK_LIST[produce_type]} has some issues in the cluster, ' \ + f'because the max difference of {self.BOTTLENECK_LIST[produce_type]} time ' \ + f'has reached {round(max_ratio * mean_total_time / 1000, 3)}ms. 
\n' + + def make_record(self): + """ + make record for what and how to optimize + """ + optimization_item = OptimizeItem( + SlowRankAnalyzer.SLOW_RANK_ANALYSIS, + self.bottelneck, + [""] + ) + self.result.add(OptimizeRecord(optimization_item)) + + def format_details(self): + details_dict = {} + headers = ["rank_id", "comupte", "communication", "free"] + data_list = [] + for key,value in self.step_trace_dict.items(): + data_list.append([key] + value) + details_dict["headers"] = headers + details_dict["data"] = data_list + return [details_dict] + + def make_render(self): + result_for_html = { + "Description" : self.bottelneck, + "suggestion" : self.suggestion, + "details" : self.format_details() + } + + self.html_render.render_template(key="cluster", + title=SlowRankAnalyzer.SLOW_RANK_ANALYSIS, + template_dir="templates", + template_name="cluster_analysis.html", + cann_version=self.cann_version, + torch_version=self.torch_version, + result=result_for_html) + + @staticmethod + def compute_max_gap_ratio(data: list, mean: float): + if mean == 0: + return 0 + else: + return (max(data) - min(data)) / mean diff --git a/profiler/advisor/analyzer/scheduling/fusion_ops/fusion_ops_analyzer.py b/profiler/advisor/analyzer/scheduling/fusion_ops/fusion_ops_analyzer.py index 3db3ae55a1..ca10dcb1f5 100644 --- a/profiler/advisor/analyzer/scheduling/fusion_ops/fusion_ops_analyzer.py +++ b/profiler/advisor/analyzer/scheduling/fusion_ops/fusion_ops_analyzer.py @@ -11,38 +11,32 @@ from profiler.advisor.dataset.timeline_event_dataset import TimelineEventDataset from profiler.advisor.result.result import OptimizeResult from profiler.advisor.result.item import OptimizeItem, OptimizeRecord from profiler.advisor.utils.utils import format_timeline_result -from profiler.advisor.display.html.render import HTMLRender from profiler.advisor.utils.utils import init_timeline_ops_db logger = logging.getLogger() class TimelineFusionOpsAnalyzer(BaseAnalyzer): + dataset_cls_list = [TimelineEventDataset] - def __init__(self, n_processes: int = 1, cann_version=None, torch_version=None, **kwargs): - self.n_processes = n_processes + def __init__(self, collection_path, n_processes: int = 1, cann_version=None, torch_version=None, **kwargs): + super().__init__(collection_path, self.dataset_cls_list, n_processes, cann_version, torch_version, **kwargs) self._matched_op_index = {} if self.n_processes <= 1 else multiprocessing.Manager().dict() self.matched_op_stacks = {} - self.cann_version = cann_version - self.torch_version = torch_version self.empty_stacks = True - self.event_dataset = None - self.html_render = HTMLRender() self.result = OptimizeResult() + self.timeline_event_dataset = self.event_dataset_list[TimelineEventDataset] - def optimize(self, timeline_dataset, **kwargs): - - self.event_dataset = timeline_dataset - + def optimize(self, **kwargs): for mode in [const.ATEN.lower(), const.OPTIMIZER.lower()]: for op_combined, npu_apis in tqdm(getattr(init_timeline_ops_db(self.cann_version, self.torch_version), f"_{mode}_op_api_map").items(), leave=False, ncols=100, desc="Scanning timeline for affinity apis"): for npu_api in npu_apis.split("/"): - self.find_fusion_ops(self.event_dataset, op_combined, npu_api, mode) + self.find_fusion_ops(self.timeline_event_dataset, op_combined, npu_api, mode) - self.query_stack(self.event_dataset) + self.query_stack(self.timeline_event_dataset) logger.info("Finish timeline analysis") self.make_record() diff --git a/profiler/advisor/common/constant.py b/profiler/advisor/common/constant.py index 
9703e78c00..df12fd76d3 100644 --- a/profiler/advisor/common/constant.py +++ b/profiler/advisor/common/constant.py @@ -104,3 +104,11 @@ DEFAULT_RULE_PATH = "./rules/" TIMELINE_FUSION_OPS_INVALID_UNIQUE_ID = -1 DEFAULT_TEMPLATE_HEADER = "Performance Optimization Suggestions" + +PT_PROF_SUFFIX = "ascend_pt" +ASCEND_PROFILER_OUTPUT = "ASCEND_PROFILER_OUTPUT" +COLLECTION_PATH = "collection_path" +CLUSTER_ANALYSIS_OUTPUT = "cluster_analysis_output" +KERNEL_DETAILS_CSV = "kernel_details.csv" +CLUSTER_STEP_TIME_CSV = "cluster_step_trace_time.csv" +CLUSTER_COMM_JSON = "cluster_communication.json" diff --git a/profiler/advisor/common/module_lib.py b/profiler/advisor/common/module_lib.py deleted file mode 100644 index 697e37f736..0000000000 --- a/profiler/advisor/common/module_lib.py +++ /dev/null @@ -1,87 +0,0 @@ -import logging - -from profiler.advisor.analyzer.scheduling.fusion_ops.fusion_ops_analyzer import TimelineFusionOpsAnalyzer -from profiler.advisor.analyzer.overall.overall_analyzer import OverallSummaryAnalyzer - -from profiler.advisor.dataset.timeline_event_dataset import TimelineEventDataset - -logger = logging.getLogger() - - -class AnalysisScope: - supported_dims = ["computing", "scheduling", "communication", "overall", "dataloader"] - - @staticmethod - def get_analyzer(dimension, analyzer_name, is_inference=False): - if is_inference: - return getattr(InferenceAnalysisScope, dimension)().get(analyzer_name) - return getattr(TrainAnalysisScope, dimension)().get(analyzer_name) - - @staticmethod - def analyzer_list(dim=None, is_inference=False): - analyzer_list = [] - dims = [dim] if dim else AnalysisScope.supported_dims - for dim in dims: - analyzer_list += list(getattr(InferenceAnalysisScope, dim)().keys()) if is_inference else list( - getattr(TrainAnalysisScope, dim)().keys()) - return analyzer_list - - -class TrainAnalysisScope(AnalysisScope): - - @staticmethod - def computing(): - return dict() - - @staticmethod - def scheduling(): - return dict( - timeline_fusion_ops=TimelineFusionOpsAnalyzer - ) - - @staticmethod - def communication(): - return dict() - - @staticmethod - def overall(): - return dict( - overall_summary=OverallSummaryAnalyzer - ) - - @staticmethod - def dataloader(): - return dict() - - -class InferenceAnalysisScope(AnalysisScope): - @staticmethod - def computing(): - return dict() - - @staticmethod - def scheduling(): - return dict() - - @staticmethod - def communication(): - return dict() - - @staticmethod - def overall(): - return dict() - - @staticmethod - def dataloader(): - return dict() - - -class AnalyzerToDataset: - analyzer_to_dataset = { - "overall_summary": [], - "timeline_fusion_ops": [TimelineEventDataset] - } - - @staticmethod - def get_dataset(analyzer_name): - return AnalyzerToDataset.analyzer_to_dataset.get(analyzer_name) diff --git a/profiler/advisor/dataset/cluster/cluster_dataset.py b/profiler/advisor/dataset/cluster/cluster_dataset.py new file mode 100644 index 0000000000..ee8b3563b7 --- /dev/null +++ b/profiler/advisor/dataset/cluster/cluster_dataset.py @@ -0,0 +1,152 @@ +import logging + +import os + +from profiler.advisor.utils.utils import singleton +from profiler.cluster_analyse.common_func.file_manager import FileManager +from profiler.advisor.common import constant as const +from profiler.cluster_analyse.common_func.constant import Constant +from collections import defaultdict +from profiler.cluster_analyse.cluster_analysis import ClusterAnalysis +from profiler.advisor.dataset.cluster.cluster_step_trace_time_bean import 
ClusterStepTraceTimeBean + +logger = logging.getLogger() + + +class ClusterDataset: + + def __init__(self, collection_path, **kwargs) -> None: + self.collection_path = os.path.realpath(collection_path) + if not self.is_cluster_analysis_output_exist(): + self.cluster_analyze() + + def is_cluster_analysis_output_exist(self): + """ + check whether input path is valid + """ + for file in os.listdir(self.collection_path): + if file == 'cluster_analysis_output': + print("[INFO]Cluster has been analyzed " + "because of the existence of cluster analysis output directory.") + print("[INFO]Skip Cluster analyze backend.") + return True + return False + + def cluster_analyze(self): + parameter = { + Constant.COLLECTION_PATH: self.collection_path, + Constant.ANALYSIS_MODE: "all" + } + print("[INFO] cluster analysis is in the process, please wait...") + try: + ClusterAnalysis(parameter).run() + except Exception as e: + raise ValueError(f"Cluster analyze backend failed:{e}") from e + + def load_csv_data(self, file_name, dataBean): + csv_path = os.path.join(self.collection_path, const.CLUSTER_ANALYSIS_OUTPUT, file_name) + if not os.path.exists(csv_path): + msg = "[ERROR] cluster_step_trace_time.csv doesn't exist, terminate analysis." + raise RuntimeError(msg) + data = FileManager.read_csv_file(csv_path, dataBean) + return data + + def load_json_data(self, file_name): + json_path = os.path.join(self.collection_path, const.CLUSTER_ANALYSIS_OUTPUT, file_name) + if not os.path.exists(json_path): + msg = "[ERROR] cluster_communication.json doesn't exist, terminate analysis." + raise RuntimeError(msg) + data = FileManager.read_json_file(json_path) + return data + + +@singleton +class ClusterStepTraceTimeDataSet(ClusterDataset): + RANK = "rank" + + def __init__(self, collection_path: str, kwargs: dict = None): + super().__init__(collection_path) + self._step_dict = defaultdict() + self.parse() + + def parse(self): + step_data = self.load_csv_data(const.CLUSTER_STEP_TIME_CSV, ClusterStepTraceTimeBean) + self._step_dict = self.formate_data(step_data) + + def formate_data(self, step_data: list): + step_dict = defaultdict(lambda: [0, 0, 0]) + for step_bean in step_data: + if step_bean.type == self.RANK: + step_dict[step_bean.index][0] += step_bean.compute + step_dict[step_bean.index][1] += step_bean.communication + step_dict[step_bean.index][2] += step_bean.free + return step_dict + + def get_data(self): + return self._step_dict + + +@singleton +class ClusterCommunicationDataSet(ClusterDataset): + RDMA_TIME_MS = "RDMA time(ms)" + RDMA_SIZE_MB = "RDMA size(mb)" + SDMA_TIME_MS = "SDMA time(ms)" + SDMA_SIZE_MB = "SDMA size(mb)" + RDMA_BANDWIDTH = "RDMA bandwidth(GB/s)" + SDMA_BANDWIDTH = "SDMA bandwidth(GB/s)" + COMMUNICATION_BANDWIDTH_INFO = "Communication Bandwidth Info" + TRANSIT_TIME = "Transit Time(ms)" + TRANSIT_SIZE = "Transit Size(MB)" + SDMA = "SDMA" + RDMA = "RDMA" + + def __init__(self, collection_path: str, kwargs: dict = None): + super().__init__(collection_path) + self.rank_bw_dict = defaultdict(lambda: { + self.RDMA_TIME_MS: 0, + self.RDMA_SIZE_MB: 0, + self.SDMA_TIME_MS: 0, + self.SDMA_SIZE_MB: 0, + }) + self.parse() + + @staticmethod + def compute_ratio(dividend: float, divisor: float): + if abs(divisor) < 1e-15: + return 0 + else: + return round(dividend / divisor, 4) + + def parse(self): + communication_json = self.load_json_data(const.CLUSTER_COMM_JSON) + self.process(communication_json) + + def process(self, communication_json: dict): + for comm_group, group_dict in communication_json.items(): + 
for step, step_dict in group_dict.items(): + for op, op_dict in step_dict.items(): + self.compute_bandwidth(op_dict) + + def compute_bandwidth(self, op_dict: dict): + for rank_id, rank_dict in op_dict.items(): + try: + rank = int(rank_id) + except ValueError as e: + msg = "[ERROR] Cluster_communication.json has invalid structure." + raise ValueError(msg) from e + for comm_type, bw_dict in rank_dict.get(self.COMMUNICATION_BANDWIDTH_INFO, {}).items(): + if comm_type == self.SDMA: + self.rank_bw_dict[rank][self.SDMA_SIZE_MB] += bw_dict.get(self.TRANSIT_SIZE) + self.rank_bw_dict[rank][self.SDMA_TIME_MS] += bw_dict.get(self.TRANSIT_TIME) + if comm_type == self.RDMA: + self.rank_bw_dict[rank][self.RDMA_SIZE_MB] += bw_dict.get(self.TRANSIT_SIZE) + self.rank_bw_dict[rank][self.RDMA_TIME_MS] += bw_dict.get(self.TRANSIT_TIME) + + for rank, rank_dict in self.rank_bw_dict.items(): + self.rank_bw_dict[rank][self.RDMA_BANDWIDTH] = self.compute_ratio( + self.rank_bw_dict[rank][self.RDMA_SIZE_MB], self.rank_bw_dict[rank][self.RDMA_TIME_MS]) + self.rank_bw_dict[rank][self.SDMA_BANDWIDTH] = self.compute_ratio( + self.rank_bw_dict[rank][self.SDMA_SIZE_MB], self.rank_bw_dict[rank][self.SDMA_TIME_MS]) + + def get_data(self): + return self.rank_bw_dict diff --git a/profiler/advisor/dataset/cluster/cluster_step_trace_time_bean.py b/profiler/advisor/dataset/cluster/cluster_step_trace_time_bean.py new file mode 100644 index 0000000000..b108fc77a3 --- /dev/null +++ b/profiler/advisor/dataset/cluster/cluster_step_trace_time_bean.py @@ -0,0 +1,67 @@ +# Copyright (c) 2023, Huawei Technologies Co., Ltd. +# All rights reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + + +class ClusterStepTraceTimeBean: + STEP = "Step" + TYPE = "Type" + INDEX = "Index" + COMPUTING = "Computing" + COMMUNICATION = "Communication(Not Overlapped)" + FREE = "Free" + + def __init__(self, data: dict): + self._data = data + + @property + def step(self) -> str: + return self._data.get(self.STEP, '') + + @property + def type(self) -> str: + return self._data.get(self.TYPE, '') + + @property + def index(self) -> int: + try: + return int(self._data.get(self.INDEX)) + except ValueError as e: + msg = "[ERROR] Cluster step trace time.csv has invalid value in column 'Index'." + raise ValueError(msg) from e + + @property + def compute(self) -> float: + try: + return float(self._data.get(self.COMPUTING, '')) + except ValueError as e: + msg = "[ERROR] Cluster step trace time.csv has invalid value in column 'Computing'." + raise ValueError(msg) from e + + @property + def communication(self) -> float: + try: + return float(self._data.get(self.COMMUNICATION, '')) + except ValueError as e: + msg = "[ERROR] Cluster step trace time.csv has invalid value in column 'Communication'." + raise ValueError(msg) from e + + @property + def free(self) -> float: + try: + return float(self._data.get(self.FREE, '')) + except ValueError as e: + msg = "[ERROR] Cluster step trace time.csv has invalid value in column 'Free'." 
+ raise ValueError(msg) from e + diff --git a/profiler/advisor/interface/interface.py b/profiler/advisor/interface/interface.py index 4a2eae244a..19da350a02 100644 --- a/profiler/advisor/interface/interface.py +++ b/profiler/advisor/interface/interface.py @@ -1,66 +1,48 @@ import os -from profiler.advisor.common.module_lib import AnalysisScope, AnalyzerToDataset +from profiler.advisor.analyzer.scheduling.fusion_ops.fusion_ops_analyzer import TimelineFusionOpsAnalyzer +from profiler.advisor.analyzer.overall.overall_analyzer import OverallSummaryAnalyzer +from profiler.advisor.dataset.timeline_event_dataset import TimelineEventDataset from profiler.advisor.utils.utils import Timer +from profiler.advisor.result.result import OptimizeResult +from profiler.advisor.analyzer.cluster.slow_rank_analyser import SlowRankAnalyzer +from profiler.advisor.analyzer.cluster.slow_link_analyser import SlowLinkAnalyzer class Interface: + supported_analysiser = { + "computing": [], + "scheduling": [TimelineFusionOpsAnalyzer], + "communication": [], + "overall": [], + "dataloader": [], + "cluster": [SlowRankAnalyzer, SlowLinkAnalyzer] + } + + all_dimension = supported_analysiser.keys() + def __init__(self, **kwargs): self.collection_path = os.path.realpath(kwargs.get("profiling_path")) - self._analyzer_controller = AnalyzerController(**kwargs) - self._dataset_controller = DatasetController(collection_path=self.collection_path, **kwargs) - def get_result(self: any, dimension: str, mode: str=None, render_html=False, **kwargs): + @staticmethod + def get_analyzer(dimension, is_inference=False): + return Interface.supported_analysiser.get(dimension, []) + + def get_result(self: any, dimension: str, render_html=False, **kwargs): """ :Param mode: affinity apis, ai cpu and so on. 
""" - analyzer = self._analyzer_controller.create_analyzer(dimension, mode, kwargs.get("is_inference", False)) - - datasets = self._dataset_controller.create_dataset(mode) - if not analyzer: - return - - if datasets: - result = analyzer.optimize(*datasets) - else: - result = analyzer.optimize() - - if render_html: - if hasattr(analyzer, "html_render"): - analyzer.html_render.render_html() - analyzer.html_render.save_to_file(f'att_advisor_{Timer().strftime}.html') - - return result - - -class AnalyzerController: - - def __init__(self, **kwargs): - self.temp_input_path = None - self.kwargs = kwargs - - def create_analyzer(self, dimension, mode: str, is_inference=False): - clss = AnalysisScope.get_analyzer(dimension, mode, is_inference) - if clss and callable(clss): - return clss(**self.kwargs) - return None - - -class DatasetController: - - def __init__(self, **kwargs): - self.collection_path = kwargs.get("collection_path") - self.kwargs = kwargs - - def create_dataset(self, mode): - dataset_cls_list = AnalyzerToDataset.get_dataset(mode) - - datasets = [] - for dataset_cls in dataset_cls_list: - if dataset_cls and callable(dataset_cls): - datasets.append(dataset_cls(self.collection_path, **self.kwargs)) - - return datasets + result_list = [] + analysiser_list = self.get_analyzer(dimension, kwargs.get("is_inference", False)) + for idx, clss in enumerate(analysiser_list): + if clss and callable(clss): + analysiser = clss(collection_path = self.collection_path, **kwargs) + result_list.append(analysiser.optimize()) + if render_html and idx == len(analysiser_list) - 1: + if hasattr(analysiser, "html_render"): + analysiser.html_render.render_html() + analysiser.html_render.save_to_file(f'att_advisor_{Timer().strftime}.html') + return result_list if __name__ == "__main__": diff --git a/profiler/cli/analyze_cli.py b/profiler/cli/analyze_cli.py index 34215d12d1..2efecffcb7 100644 --- a/profiler/cli/analyze_cli.py +++ b/profiler/cli/analyze_cli.py @@ -9,38 +9,21 @@ sys.path.append(os.path.join(os.path.dirname(os.path.dirname(__file__)), "cluste from profiler.advisor.utils.tools import CONTEXT_SETTINGS, ClickAliasedGroup from profiler.advisor.common import constant from profiler.advisor.utils.utils import debug_option -from profiler.advisor.common.module_lib import AnalysisScope from profiler.advisor.interface.interface import Interface logger = logging.getLogger() def _analyze(dimensions, **kwargs): - is_inference = kwargs.get("is_inference", False) - user_input_mode = kwargs.get("mode") result_list = [] job_list = [] for dimension in dimensions: - - valid_modes = AnalysisScope.analyzer_list(dimension, is_inference) - if not valid_modes: - logger.info("Skip analysis of dimension %s, no analyzer", dimension) - continue - if user_input_mode and user_input_mode not in valid_modes: - logger.error("Got error mode %s for analysis dimension %s, optionals are %s", user_input_mode, dimension, - valid_modes) - continue - - analysis_modes = [user_input_mode] if user_input_mode else valid_modes - - for mode in analysis_modes: interface = Interface(**kwargs) - job_list.append((dimension, mode, interface)) + job_list.append((dimension, interface)) - for i, (dimension, mode, interface) in enumerate(job_list[::-1]): - result_list.append( - interface.get_result(dimension, mode, render_html=i == len(job_list) - 1, is_inference=is_inference)) + for i, (dimension, interface) in enumerate(job_list[::-1]): + result_list += interface.get_result(dimension, render_html=i == len(job_list) - 1) for result in result_list[::-1]: 
if result and hasattr(result, "show"): @@ -76,7 +59,7 @@ def analyze_all(**kwargs) -> None: if not kwargs.get("benchmark_profiling_path"): kwargs["benchmark_profiling_path"] = kwargs.get("profiling_path") - _analyze(AnalysisScope.supported_dims, **kwargs) + _analyze(Interface.all_dimension, **kwargs) @analyze_cli.command(context_settings=CONTEXT_SETTINGS, diff --git a/profiler/cluster_analyse/cluster_analysis.py b/profiler/cluster_analyse/cluster_analysis.py index 2445462211..fd127fdc03 100644 --- a/profiler/cluster_analyse/cluster_analysis.py +++ b/profiler/cluster_analyse/cluster_analysis.py @@ -25,7 +25,7 @@ from common_func.path_manager import PathManager from analysis.analysis_facade import AnalysisFacade -class Interface: +class ClusterAnalysis: ASCEND_PT = "ascend_pt" ASCEND_MS = "ascend_ms" @@ -88,4 +88,4 @@ if __name__ == "__main__": Constant.COLLECTION_PATH: args_parsed.collection_path, Constant.ANALYSIS_MODE: args_parsed.mode } - Interface(parameter).run() + ClusterAnalysis(parameter).run() -- Gitee From 67193fae1648f85581cd027f5103da5c467d3c98 Mon Sep 17 00:00:00 2001 From: PersonalC Date: Wed, 8 May 2024 19:35:20 +0800 Subject: [PATCH 08/21] =?UTF-8?q?att=20advisor=E6=96=B0=E5=A2=9Edataset?= =?UTF-8?q?=E6=A1=86=E6=9E=B6?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- profiler/advisor/analyzer/base_analyzer.py | 73 +- .../analyzer/cluster/slow_link_analyser.py | 10 +- .../analyzer/cluster/slow_rank_analyser.py | 10 +- .../{computing => computation}/__init__.py | 0 .../aicpu/__init__.py | 0 .../bound/__init__.py | 0 .../op_compile/__init__.py | 0 .../analyzer/overall/overall_analyzer.py | 1 + .../{scheduling => schedule}/__init__.py | 0 .../free_event/__init__.py | 0 .../fusion_ops/__init__.py | 0 .../fusion_ops/fusion_ops_analyzer.py | 542 ++++----- profiler/advisor/common/constant.py | 12 +- .../advisor/common/timeline/fusion_ops_db.py | 340 +----- .../common/timeline/fusion_ops_rule.py | 110 ++ .../timeline/fusion_ops_rule_handler.py | 193 +++ profiler/advisor/config/config.ini | 8 +- .../dataset/cluster/cluster_dataset.py | 22 +- profiler/advisor/dataset/dataset.py | 38 + .../advisor/dataset/timeline_event_dataset.py | 26 +- profiler/advisor/interface/interface.py | 33 +- profiler/advisor/utils/utils.py | 1048 +++++++++-------- profiler/cli/analyze_cli.py | 72 +- profiler/cli/entrance.py | 2 +- profiler/cli/update_cli.py | 40 + requirements/{test.txt => tests.txt} | 8 +- version.txt | 2 +- 27 files changed, 1413 insertions(+), 1177 deletions(-) rename profiler/advisor/analyzer/{computing => computation}/__init__.py (100%) rename profiler/advisor/analyzer/{computing => computation}/aicpu/__init__.py (100%) rename profiler/advisor/analyzer/{computing => computation}/bound/__init__.py (100%) rename profiler/advisor/analyzer/{computing => computation}/op_compile/__init__.py (100%) rename profiler/advisor/analyzer/{scheduling => schedule}/__init__.py (100%) rename profiler/advisor/analyzer/{scheduling => schedule}/free_event/__init__.py (100%) rename profiler/advisor/analyzer/{scheduling => schedule}/fusion_ops/__init__.py (100%) rename profiler/advisor/analyzer/{scheduling => schedule}/fusion_ops/fusion_ops_analyzer.py (91%) create mode 100644 profiler/advisor/common/timeline/fusion_ops_rule.py create mode 100644 profiler/advisor/common/timeline/fusion_ops_rule_handler.py create mode 100644 profiler/advisor/dataset/dataset.py create mode 100644 profiler/cli/update_cli.py rename requirements/{test.txt => tests.txt} (95%) diff --git 
a/profiler/advisor/analyzer/base_analyzer.py b/profiler/advisor/analyzer/base_analyzer.py
index ff945da5cf..6f9438ba75 100644
--- a/profiler/advisor/analyzer/base_analyzer.py
+++ b/profiler/advisor/analyzer/base_analyzer.py
@@ -1,17 +1,60 @@
+import logging
+from functools import wraps
+from typing import Dict, List, Union
 from abc import abstractmethod, ABCMeta
+
+from profiler.advisor.common import constant
+from profiler.advisor.common.version_control import VersionControl
+from profiler.advisor.dataset.dataset import Dataset
+from profiler.advisor.result.result import OptimizeResult
 from profiler.advisor.display.html.render import HTMLRender
 
-dataset_cls_list = []
+logger = logging.getLogger()
+
+
+class BaseAnalyzer(VersionControl, metaclass=ABCMeta):
+    _SUPPORT_VERSIONS = constant.SUPPORTED_CANN_VERSION
 
-class BaseAnalyzer(metaclass=ABCMeta):
-    def __init__(self, collection_path, dataset_cls_list, n_processes: int = 1, cann_version=None, torch_version=None, **kwargs):
+    dataset_cls_list = []
+
+    def __init__(self, collection_path, n_processes: int = 1, cann_version=constant.DEFAULT_CANN_VERSION,
+                 torch_version=constant.DEFAULT_TORCH_VERSION, **kwargs):
         self.n_processes = n_processes
         self.cann_version = cann_version
         self.torch_version = torch_version
         self.html_render = HTMLRender()
         self.collection_path = collection_path
         self.kwargs = kwargs
-        self.event_dataset_list = self.get_dataset_dict(dataset_cls_list)
+        self.dataset_list: Dict[str, List[Dataset]] = {}
+        self.init_dataset_list()
+        self.result = OptimizeResult()
+        self.record_list: Dict[str, List] = {}
+
+    @classmethod
+    def check_data(cls, data_list: tuple):
+        """
+        Check that every dataset key in data_list is available before running the decorated analysis.
+        :param data_list: dataset keys required by the analysis
+        :return: decorator; the wrapped function returns None if any required dataset is missing
+        """
+
+        def decorate(func):
+
+            @wraps(func)
+            def wrapper(self):
+                data = self.dataset_list
+                if data is None:
+                    return None
+                for data_key in data_list:
+                    if data_key not in data:
+                        return None
+
+                logger.info("Enable analysis %s with %s", self.__class__.__name__, ",".join(data_list))
+                return func(self, data)
+
+            return wrapper
+
+        return decorate
 
     @abstractmethod
     def optimize(self):
@@ -25,10 +68,24 @@ class BaseAnalyzer(metaclass=ABCMeta):
     def make_render(self):
         pass
 
-    def get_dataset_dict(self, dataset_cls_list):
-        datasets = {key: [] for key in dataset_cls_list}
+    def init_dataset_list(self) -> None:
+        dataset_cls_list = self.dataset_cls_list
+        if len(dataset_cls_list) == 0:
+            logger.warning("Analyzer %s does not rely on any dataset!", self.__class__.__name__)
+            return
         for dataset_cls in dataset_cls_list:
             if dataset_cls and callable(dataset_cls):
-                datasets[dataset_cls] = dataset_cls(self.collection_path, **self.kwargs)
-        return datasets
+                dataset_cls(collection_path=self.collection_path, data=self.dataset_list, **self.kwargs)
+
+    @staticmethod
+    def get_first_data_by_key(data, key) -> Union[Dataset, None]:
+        """
+        Get the first dataset stored under the given key.
+        :param data: dict mapping dataset keys to lists of datasets
+        :param key: dataset key
+        :return: the first dataset under the key, or None if the key is missing or has no data
+        """
+        if key in data and len(data[key]) > 0:
+            return data[key][0]
+        return None
diff --git a/profiler/advisor/analyzer/cluster/slow_link_analyser.py b/profiler/advisor/analyzer/cluster/slow_link_analyser.py
index d4212ada2c..7da50a8180 100644
--- a/profiler/advisor/analyzer/cluster/slow_link_analyser.py
+++ b/profiler/advisor/analyzer/cluster/slow_link_analyser.py
@@ -14,7 +14,9 @@
 # limitations under the License.
from collections import defaultdict +from typing import Dict, List from profiler.advisor.analyzer.base_analyzer import BaseAnalyzer +from profiler.advisor.common import constant from profiler.advisor.result.result import OptimizeResult from profiler.advisor.result.item import OptimizeItem, OptimizeRecord from profiler.advisor.dataset.cluster.cluster_dataset import ClusterCommunicationDataSet @@ -35,9 +37,11 @@ class SlowLinkAnalyzer(BaseAnalyzer): SLOW_LINK_ANALYSIS = "slow_link_analysis" dataset_cls_list = [ClusterCommunicationDataSet] - def __init__(self, collection_path, n_processes: int = 1, cann_version=None, torch_version=None, **kwargs): - super().__init__(collection_path, self.dataset_cls_list, n_processes, cann_version, torch_version, **kwargs) - self.communication_data_class = self.event_dataset_list[ClusterCommunicationDataSet] + def __init__(self, collection_path, n_processes: int = 1, cann_version=constant.DEFAULT_CANN_VERSION, + torch_version=constant.DEFAULT_TORCH_VERSION, **kwargs): + super().__init__(collection_path, n_processes, cann_version, torch_version, **kwargs) + key = ClusterCommunicationDataSet.get_key() + self.communication_data_class = self.get_first_data_by_key(self.dataset_list, key) self.rank_bw_dict = self.communication_data_class.get_data() self.result = OptimizeResult() self.bottelneck = '' diff --git a/profiler/advisor/analyzer/cluster/slow_rank_analyser.py b/profiler/advisor/analyzer/cluster/slow_rank_analyser.py index 35b4663d38..b49ef5ec8c 100644 --- a/profiler/advisor/analyzer/cluster/slow_rank_analyser.py +++ b/profiler/advisor/analyzer/cluster/slow_rank_analyser.py @@ -14,7 +14,9 @@ # limitations under the License. from collections import defaultdict +from typing import Dict, List from profiler.advisor.analyzer.base_analyzer import BaseAnalyzer +from profiler.advisor.common import constant from profiler.advisor.result.result import OptimizeResult from profiler.advisor.result.item import OptimizeItem, OptimizeRecord from profiler.advisor.dataset.cluster.cluster_dataset import ClusterStepTraceTimeDataSet @@ -27,9 +29,11 @@ class SlowRankAnalyzer(BaseAnalyzer): BOTTLENECK_LIST = ['Computing', 'Communication', "Free"] dataset_cls_list = [ClusterStepTraceTimeDataSet] - def __init__(self, collection_path, n_processes: int = 1, cann_version=None, torch_version=None, **kwargs): - super().__init__(collection_path, self.dataset_cls_list, n_processes, cann_version, torch_version, **kwargs) - self.step_trace_class = self.event_dataset_list[ClusterStepTraceTimeDataSet] + def __init__(self, collection_path, n_processes: int = 1, cann_version=constant.DEFAULT_CANN_VERSION, + torch_version=constant.DEFAULT_TORCH_VERSION, **kwargs): + super().__init__(collection_path, n_processes, cann_version, torch_version, **kwargs) + key = ClusterStepTraceTimeDataSet.get_key() + self.step_trace_class = self.get_first_data_by_key(self.dataset_list, key) self.step_trace_dict = self.step_trace_class.get_data() self.result = OptimizeResult() self.bottelneck = '' diff --git a/profiler/advisor/analyzer/computing/__init__.py b/profiler/advisor/analyzer/computation/__init__.py similarity index 100% rename from profiler/advisor/analyzer/computing/__init__.py rename to profiler/advisor/analyzer/computation/__init__.py diff --git a/profiler/advisor/analyzer/computing/aicpu/__init__.py b/profiler/advisor/analyzer/computation/aicpu/__init__.py similarity index 100% rename from profiler/advisor/analyzer/computing/aicpu/__init__.py rename to 
profiler/advisor/analyzer/computation/aicpu/__init__.py diff --git a/profiler/advisor/analyzer/computing/bound/__init__.py b/profiler/advisor/analyzer/computation/bound/__init__.py similarity index 100% rename from profiler/advisor/analyzer/computing/bound/__init__.py rename to profiler/advisor/analyzer/computation/bound/__init__.py diff --git a/profiler/advisor/analyzer/computing/op_compile/__init__.py b/profiler/advisor/analyzer/computation/op_compile/__init__.py similarity index 100% rename from profiler/advisor/analyzer/computing/op_compile/__init__.py rename to profiler/advisor/analyzer/computation/op_compile/__init__.py diff --git a/profiler/advisor/analyzer/overall/overall_analyzer.py b/profiler/advisor/analyzer/overall/overall_analyzer.py index 93b227fb61..e31a5d4288 100644 --- a/profiler/advisor/analyzer/overall/overall_analyzer.py +++ b/profiler/advisor/analyzer/overall/overall_analyzer.py @@ -1,4 +1,5 @@ import logging +from typing import Dict, List from profiler.advisor.analyzer.base_analyzer import BaseAnalyzer from profiler.advisor.display.html.render import HTMLRender diff --git a/profiler/advisor/analyzer/scheduling/__init__.py b/profiler/advisor/analyzer/schedule/__init__.py similarity index 100% rename from profiler/advisor/analyzer/scheduling/__init__.py rename to profiler/advisor/analyzer/schedule/__init__.py diff --git a/profiler/advisor/analyzer/scheduling/free_event/__init__.py b/profiler/advisor/analyzer/schedule/free_event/__init__.py similarity index 100% rename from profiler/advisor/analyzer/scheduling/free_event/__init__.py rename to profiler/advisor/analyzer/schedule/free_event/__init__.py diff --git a/profiler/advisor/analyzer/scheduling/fusion_ops/__init__.py b/profiler/advisor/analyzer/schedule/fusion_ops/__init__.py similarity index 100% rename from profiler/advisor/analyzer/scheduling/fusion_ops/__init__.py rename to profiler/advisor/analyzer/schedule/fusion_ops/__init__.py diff --git a/profiler/advisor/analyzer/scheduling/fusion_ops/fusion_ops_analyzer.py b/profiler/advisor/analyzer/schedule/fusion_ops/fusion_ops_analyzer.py similarity index 91% rename from profiler/advisor/analyzer/scheduling/fusion_ops/fusion_ops_analyzer.py rename to profiler/advisor/analyzer/schedule/fusion_ops/fusion_ops_analyzer.py index ca10dcb1f5..4259db093b 100644 --- a/profiler/advisor/analyzer/scheduling/fusion_ops/fusion_ops_analyzer.py +++ b/profiler/advisor/analyzer/schedule/fusion_ops/fusion_ops_analyzer.py @@ -1,271 +1,271 @@ -import multiprocessing -import logging -import re - -from tqdm import tqdm - -from profiler.advisor.analyzer.base_analyzer import BaseAnalyzer -from profiler.advisor.common import constant as const -from profiler.advisor.common.timeline.event import TimelineEvent -from profiler.advisor.dataset.timeline_event_dataset import TimelineEventDataset -from profiler.advisor.result.result import OptimizeResult -from profiler.advisor.result.item import OptimizeItem, OptimizeRecord -from profiler.advisor.utils.utils import format_timeline_result -from profiler.advisor.utils.utils import init_timeline_ops_db - -logger = logging.getLogger() - - -class TimelineFusionOpsAnalyzer(BaseAnalyzer): - dataset_cls_list = [TimelineEventDataset] - - def __init__(self, collection_path, n_processes: int = 1, cann_version=None, torch_version=None, **kwargs): - super().__init__(collection_path, self.dataset_cls_list, n_processes, cann_version, torch_version, **kwargs) - self._matched_op_index = {} if self.n_processes <= 1 else multiprocessing.Manager().dict() - 
self.matched_op_stacks = {} - self.empty_stacks = True - self.result = OptimizeResult() - self.timeline_event_dataset = self.event_dataset_list[TimelineEventDataset] - - def optimize(self, **kwargs): - for mode in [const.ATEN.lower(), const.OPTIMIZER.lower()]: - - for op_combined, npu_apis in tqdm(getattr(init_timeline_ops_db(self.cann_version, self.torch_version), - f"_{mode}_op_api_map").items(), leave=False, ncols=100, - desc="Scanning timeline for affinity apis"): - for npu_api in npu_apis.split("/"): - self.find_fusion_ops(self.timeline_event_dataset, op_combined, npu_api, mode) - - self.query_stack(self.timeline_event_dataset) - - logger.info("Finish timeline analysis") - self.make_record() - self.make_render() - return self.result - - def find_fusion_ops(self, event_dataset: TimelineEventDataset, ops: str, npu_api: str, mode: str): - """ - :Param event_dataset: dataset of timeline event - :Param ops: operator combination with '-' as separator , e.g. permute-reshape - :Param npu_api: api of torch_npu, generally more efficient than torch api - :Param mode: aten or dequeue or optimizer - :Return: json of op_name and called times and detail stacks - """ - op_rule_pattern, enable_regex = self._format_rule_to_pattern(ops) - if not enable_regex: - self._match_ops(event_dataset, op_rule_pattern, npu_api, mode) - else: - try: - self._match_ops_with_regex(event_dataset, op_rule_pattern, npu_api, mode) - except Exception as e: - logger.warning("Failed to find fusion operators with regex %s, reason is %s", ops, e) - - def _match_ops(self, event_dataset: TimelineEventDataset, ops: str, npu_api: str, mode: str): - """ match operator based on fusion operators rule(without regex), - only strictly equals of op name list means matched - :Param event_dataset: dataset of timeline event - :Param ops: operator combination with '-' as separator , e.g. permute-reshape - :Param npu_api: api of torch_npu, generally more efficient than torch api - :Param mode: aten or dequeue or optimizer - """ - op_list = ops.split(const.OP_SEP) - - matched_op_index = set() - api_ops_matched = False - - for index, event in enumerate(getattr(event_dataset, mode)): - if self._replace_op_name_prefix(event.name, mode) != op_list[0]: - continue - tmp_dequeue_event_names = [self._replace_op_name_prefix(event.name, mode) for event in - getattr(event_dataset, mode)[index: index + len(op_list)]] - if tmp_dequeue_event_names != op_list: - continue - api_ops_matched = True - matched_op_index.add(event.dataset_index) - - if api_ops_matched: - self._matched_op_index[npu_api + f":{ops}"] = matched_op_index - - def _match_ops_with_regex(self, event_dataset: TimelineEventDataset, op_rule_pattern: str, npu_api: str, - mode: str): - """ match operator based on fusion operators rule(with regex), - using regex to support condition like 'a = torch.mul(xxx) if xxx else torch.add(xxx)' - :Param event_dataset: dataset of timeline event - :Param op_rule_pattern: fusion operators rule with regex definition , e.g. add-mul{0,10}, add-mul* - :Param npu_api: api of torch_npu, generally more efficient than torch api - :Param mode: aten or dequeue or optimizer - """ - matched_op_index = set() - total_op_name = "".join([f"{const.OP_SEP}{self._replace_op_name_prefix(event.name, mode)}{const.OP_SEP}" - for event in - getattr(event_dataset, mode)]) - - matched_pattern_index_tuple = [(x.start(0), x.end(0)) for x in re.finditer(op_rule_pattern, total_op_name)] - # convert list of index tuple to a whole list: [(3, 25), ...] -> [3, 25, ...] 
- total_ops_split_points = [num for sublist in matched_pattern_index_tuple for num in sublist] - - api_ops_matched = len(total_ops_split_points) != 0 - - op_index = [] - if 0 not in total_ops_split_points: - total_ops_split_points = [0] + total_ops_split_points - if len(list(total_op_name)) not in total_ops_split_points: - total_ops_split_points.append(len(list(total_op_name))) - - # convert total ops name like "-add-mul-xxx-div-" to small pieces like [["add", "mul"], [...], ["div"]] - # by the regex index and then calculate the real index for matched fusion operators in event dataset - for l, r in zip(total_ops_split_points, total_ops_split_points[1:]): - matched_op_flag = True if (l, r) in matched_pattern_index_tuple else False - matched_ops_list = total_op_name[l: r].strip(const.OP_SEP).split(const.OP_SEP + const.OP_SEP) - op_index.append([matched_op_flag, len(matched_ops_list)]) - for i, _ in enumerate(op_index): - if i > 0: - # calculate cumsum for indexing matched operator - op_index[i][1] = op_index[i][1] + op_index[i - 1][1] - op_index = [[False, 0]] + op_index - - for i, _ in enumerate(op_index): - if not op_index[i][0]: - continue - index = op_index[i - 1][1] - matched_op_index.add(index) - - if index > len(getattr(event_dataset, mode)) - 1: - continue - dataset_index = getattr(event_dataset, mode)[index].get("dataset_index") - matched_op_index.add(dataset_index) - - if api_ops_matched: - self._matched_op_index[npu_api + f":{op_rule_pattern}"] = sorted(list(matched_op_index)) - - def make_record(self): - """ - make record for what and how to optimize - """ - if not self.matched_op_stacks: - return - - desc = f"Found {len(format_timeline_result(self.matched_op_stacks))} apis to be replaced" \ - f" based on the runtime env cann-{self.cann_version} and torch-{self.torch_version}" - suggestion = "Please replace training api according to sub table 'Affinity training api'" - if self.empty_stacks: - desc += ", but with no stack" - suggestion = const.TIMELINE_EMPTY_STACKS_PROMPT.format( - timeline_profiling_doc_url=const.TIMELINE_WITH_STACK_DOC_URL - ) - - optimization_item = OptimizeItem( - const.AFFINITY_TRAINING_API, - desc, - [suggestion] - ) - - self.result.add(OptimizeRecord(optimization_item)) - - record_title = ["Affinity API", "Code stacks", "Stack called counts"] - self.result.add_detail(const.AFFINITY_TRAINING_API, headers=record_title) - - for api_name, stacks_info in format_timeline_result(self.matched_op_stacks).items(): - if not stacks_info: - detail = [api_name, "null", "null"] - self.result.add_detail(const.AFFINITY_TRAINING_API, detail=detail) - else: - for stack in stacks_info: - detail = [api_name, *stack] - self.result.add_detail(const.AFFINITY_TRAINING_API, detail=detail) - - def make_render(self): - format_result_for_html = format_timeline_result(dict(self.matched_op_stacks), dump_html=True) - - self.html_render.render_template(key="scheduling", - template_dir="templates", - template_name="affinity_api.html", - cann_version=self.cann_version, - torch_version=self.torch_version, - empty_stacks=self.empty_stacks, - with_stack_doc_url=const.TIMELINE_WITH_STACK_DOC_URL, - api_doc_url=const.TIMELINE_API_DOC_URL, - result=format_result_for_html) - - def query_stack(self, event_dataset: TimelineEventDataset): - if all([len(matched_index) == 0 for matched_index in self._matched_op_index.values()]): - return - - op_stack_list = event_dataset.parse_data_with_generator(self._query_stack_by_matched_index) - for op_stack in op_stack_list: - for op_rule, stack in 
op_stack.items(): - if op_rule not in self.matched_op_stacks: - self.matched_op_stacks[op_rule] = {} - if stack == const.TIMELINE_FUSION_OPS_NO_STACK_FLAG: - continue - if stack not in self.matched_op_stacks[op_rule]: - self.matched_op_stacks[op_rule][stack] = 0 - self.matched_op_stacks[op_rule][stack] += 1 - - def _query_stack_by_matched_index(self, index, event): - stack_record = {} - event = TimelineEvent(event) - - matched_op_rules = [] - for op_rule, matched_index in self._matched_op_index.items(): - if index not in matched_index: - continue - - matched_op_rules.append(op_rule) - stack = event.args.get(const.CALL_STACKS) - - if not stack: - logger.debug("Got empty '%s' for event %s", const.CALL_STACKS, event) - continue - - if self.empty_stacks and stack: - self.empty_stacks = False - - stack_record[op_rule] = stack - - if matched_op_rules and not stack_record: - for op_rule in matched_op_rules: - stack_record[op_rule] = const.TIMELINE_FUSION_OPS_NO_STACK_FLAG - - return stack_record - - def _replace_op_name_prefix(self, event_name, mode): - if mode == const.DEQUEUE.lower(): - op_name_prefix = f"{const.DEQUEUE}{const.DEQUEUE_SEP}" - elif mode == const.ATEN: - op_name_prefix = f"{const.ATEN}{const.ATEN_SEP}" - else: - op_name_prefix = f"{const.OPTIMIZER}.{const.OPTIMIZER_STEP}{const.OPTIMIZER_SEP}" - - return event_name.replace(op_name_prefix, "") - - def _format_rule_to_pattern(self, op_rule): - """ - Args: - op_rule: like (mul){0,1}-(add|neg){0,2}-dropout-(softmax)* - - Returns: op_pattern like (-mul-){0,1}(-add-|-neg-){0,2}(-dropout-)(-softmax-)* - """ - enable_regex = False - if "(" not in op_rule and ")" not in op_rule: - # op_rule which requires fuzzy matching mush consist of "()" - return op_rule, enable_regex - - enable_regex = True - op_pattern_list = op_rule.split(const.OP_SEP) - format_op_pattern = "" - for op_pattern in op_pattern_list: - matched_res = re.search(r'\((.*?)\)', op_pattern) - - ops_index_range = (matched_res.start() + 1, matched_res.end() - 1) if matched_res else ( - 0, len(op_pattern)) - - op_names = op_pattern[ops_index_range[0]: ops_index_range[1]] - tmp_op_names_record = [] - for op_name in op_names.split("|"): - tmp_op_names_record.append(f"{const.OP_SEP}{op_name.strip(' ')}{const.OP_SEP}") - op_suffix = op_pattern[ops_index_range[1] + 1:] - op_names_format = f"({'|'.join(tmp_op_names_record)}){op_suffix}" - - format_op_pattern += op_names_format - return format_op_pattern, enable_regex +import multiprocessing +import logging +import re + +from tqdm import tqdm + +from profiler.advisor.analyzer.base_analyzer import BaseAnalyzer +from profiler.advisor.common import constant as const +from profiler.advisor.common.timeline.event import TimelineEvent +from profiler.advisor.dataset.timeline_event_dataset import TimelineEventDataset +from profiler.advisor.result.item import OptimizeItem, OptimizeRecord +from profiler.advisor.utils.utils import format_timeline_result +from profiler.advisor.common.timeline.fusion_ops_db import init_timeline_ops_db + +logger = logging.getLogger() + + +class TimelineFusionOpsAnalyzer(BaseAnalyzer): + dataset_cls_list = [TimelineEventDataset] + + def __init__(self, collection_path, n_processes: int = 1, cann_version=const.DEFAULT_CANN_VERSION, + torch_version=const.DEFAULT_TORCH_VERSION, **kwargs): + super().__init__(collection_path, n_processes, cann_version, torch_version, **kwargs) + self._matched_op_index = {} if self.n_processes <= 1 else multiprocessing.Manager().dict() + self.matched_op_stacks = {} + self.empty_stacks = True 
+ key = TimelineEventDataset.get_key() + self.timeline_event_dataset = self.get_first_data_by_key(self.dataset_list, key) + + def optimize(self, **kwargs): + for mode in [const.ATEN.lower(), const.OPTIMIZER.lower()]: + + for op_combined, npu_apis in tqdm(getattr(init_timeline_ops_db(self.cann_version, self.torch_version), + f"_{mode}_op_api_map").items(), leave=False, ncols=100, + desc="Scanning timeline for affinity apis"): + for npu_api in npu_apis.split("/"): + self.find_fusion_ops(self.timeline_event_dataset, op_combined, npu_api, mode) + + self.query_stack(self.timeline_event_dataset) + + logger.info("Finish timeline analysis") + self.make_record() + self.make_render() + return self.result + + def find_fusion_ops(self, event_dataset, ops: str, npu_api: str, mode: str): + """ + :Param event_dataset: dataset of timeline event + :Param ops: operator combination with '-' as separator , e.g. permute-reshape + :Param npu_api: api of torch_npu, generally more efficient than torch api + :Param mode: aten or dequeue or optimizer + :Return: json of op_name and called times and detail stacks + """ + op_rule_pattern, enable_regex = self._format_rule_to_pattern(ops) + if not enable_regex: + self._match_ops(event_dataset, op_rule_pattern, npu_api, mode) + else: + try: + self._match_ops_with_regex(event_dataset, op_rule_pattern, npu_api, mode) + except Exception as e: + logger.warning("Failed to find fusion operators with regex %s, reason is %s", ops, e) + + def _match_ops(self, event_dataset, ops: str, npu_api: str, mode: str): + """ match operator based on fusion operators rule(without regex), + only strictly equals of op name list means matched + :Param event_dataset: dataset of timeline event + :Param ops: operator combination with '-' as separator , e.g. permute-reshape + :Param npu_api: api of torch_npu, generally more efficient than torch api + :Param mode: aten or dequeue or optimizer + """ + op_list = ops.split(const.OP_SEP) + + matched_op_index = set() + api_ops_matched = False + + for index, event in enumerate(getattr(event_dataset, mode)): + if self._replace_op_name_prefix(event.name, mode) != op_list[0]: + continue + tmp_dequeue_event_names = [self._replace_op_name_prefix(event.name, mode) for event in + getattr(event_dataset, mode)[index: index + len(op_list)]] + if tmp_dequeue_event_names != op_list: + continue + api_ops_matched = True + matched_op_index.add(event.dataset_index) + + if api_ops_matched: + self._matched_op_index[npu_api + f":{ops}"] = matched_op_index + + def _match_ops_with_regex(self, event_dataset, op_rule_pattern: str, npu_api: str, + mode: str): + """ match operator based on fusion operators rule(with regex), + using regex to support condition like 'a = torch.mul(xxx) if xxx else torch.add(xxx)' + :Param event_dataset: dataset of timeline event + :Param op_rule_pattern: fusion operators rule with regex definition , e.g. add-mul{0,10}, add-mul* + :Param npu_api: api of torch_npu, generally more efficient than torch api + :Param mode: aten or dequeue or optimizer + """ + matched_op_index = set() + total_op_name = "".join([f"{const.OP_SEP}{self._replace_op_name_prefix(event.name, mode)}{const.OP_SEP}" + for event in + getattr(event_dataset, mode)]) + + matched_pattern_index_tuple = [(x.start(0), x.end(0)) for x in re.finditer(op_rule_pattern, total_op_name)] + # convert list of index tuple to a whole list: [(3, 25), ...] -> [3, 25, ...] 
+ total_ops_split_points = [num for sublist in matched_pattern_index_tuple for num in sublist] + + api_ops_matched = len(total_ops_split_points) != 0 + + op_index = [] + if 0 not in total_ops_split_points: + total_ops_split_points = [0] + total_ops_split_points + if len(list(total_op_name)) not in total_ops_split_points: + total_ops_split_points.append(len(list(total_op_name))) + + # convert total ops name like "-add-mul-xxx-div-" to small pieces like [["add", "mul"], [...], ["div"]] + # by the regex index and then calculate the real index for matched fusion operators in event dataset + for l, r in zip(total_ops_split_points, total_ops_split_points[1:]): + matched_op_flag = True if (l, r) in matched_pattern_index_tuple else False + matched_ops_list = total_op_name[l: r].strip(const.OP_SEP).split(const.OP_SEP + const.OP_SEP) + op_index.append([matched_op_flag, len(matched_ops_list)]) + for i, _ in enumerate(op_index): + if i > 0: + # calculate cumsum for indexing matched operator + op_index[i][1] = op_index[i][1] + op_index[i - 1][1] + op_index = [[False, 0]] + op_index + + for i, _ in enumerate(op_index): + if not op_index[i][0]: + continue + index = op_index[i - 1][1] + matched_op_index.add(index) + + if index > len(getattr(event_dataset, mode)) - 1: + continue + dataset_index = getattr(event_dataset, mode)[index].get("dataset_index") + matched_op_index.add(dataset_index) + + if api_ops_matched: + self._matched_op_index[npu_api + f":{op_rule_pattern}"] = sorted(list(matched_op_index)) + + def make_record(self): + """ + make record for what and how to optimize + """ + if not self.matched_op_stacks: + return + + desc = f"Found {len(format_timeline_result(self.matched_op_stacks))} apis to be replaced" \ + f" based on the runtime env cann-{self.cann_version} and torch-{self.torch_version}" + suggestion = "Please replace training api according to sub table 'Affinity training api'" + if self.empty_stacks: + desc += ", but with no stack" + suggestion = const.TIMELINE_EMPTY_STACKS_PROMPT.format( + timeline_profiling_doc_url=const.TIMELINE_WITH_STACK_DOC_URL + ) + + optimization_item = OptimizeItem( + const.AFFINITY_TRAINING_API, + desc, + [suggestion] + ) + + self.result.add(OptimizeRecord(optimization_item)) + + record_title = ["Affinity API", "Code stacks", "Stack called counts"] + self.result.add_detail(const.AFFINITY_TRAINING_API, headers=record_title) + + for api_name, stacks_info in format_timeline_result(self.matched_op_stacks).items(): + if not stacks_info: + detail = [api_name, "null", "null"] + self.result.add_detail(const.AFFINITY_TRAINING_API, detail=detail) + else: + for stack in stacks_info: + detail = [api_name, *stack] + self.result.add_detail(const.AFFINITY_TRAINING_API, detail=detail) + + def make_render(self): + format_result_for_html = format_timeline_result(dict(self.matched_op_stacks), dump_html=True) + + self.html_render.render_template(key="schedule", + template_dir="templates", + template_name="affinity_api.html", + cann_version=self.cann_version, + torch_version=self.torch_version, + empty_stacks=self.empty_stacks, + with_stack_doc_url=const.TIMELINE_WITH_STACK_DOC_URL, + api_doc_url=const.TIMELINE_API_DOC_URL, + result=format_result_for_html) + + def query_stack(self, event_dataset): + if all([len(matched_index) == 0 for matched_index in self._matched_op_index.values()]): + return + + op_stack_list = event_dataset.parse_data_with_generator(self._query_stack_by_matched_index) + for op_stack in op_stack_list: + for op_rule, stack in op_stack.items(): + if op_rule not in 
self.matched_op_stacks: + self.matched_op_stacks[op_rule] = {} + if stack == const.TIMELINE_FUSION_OPS_NO_STACK_FLAG: + continue + if stack not in self.matched_op_stacks[op_rule]: + self.matched_op_stacks[op_rule][stack] = 0 + self.matched_op_stacks[op_rule][stack] += 1 + + def _query_stack_by_matched_index(self, index, event): + stack_record = {} + event = TimelineEvent(event) + + matched_op_rules = [] + for op_rule, matched_index in self._matched_op_index.items(): + if index not in matched_index: + continue + + matched_op_rules.append(op_rule) + stack = event.args.get(const.CALL_STACKS) + + if not stack: + logger.debug("Got empty '%s' for event %s", const.CALL_STACKS, event) + continue + + if self.empty_stacks and stack: + self.empty_stacks = False + + stack_record[op_rule] = stack + + if matched_op_rules and not stack_record: + for op_rule in matched_op_rules: + stack_record[op_rule] = const.TIMELINE_FUSION_OPS_NO_STACK_FLAG + + return stack_record + + def _replace_op_name_prefix(self, event_name, mode): + if mode == const.DEQUEUE.lower(): + op_name_prefix = f"{const.DEQUEUE}{const.DEQUEUE_SEP}" + elif mode == const.ATEN: + op_name_prefix = f"{const.ATEN}{const.ATEN_SEP}" + else: + op_name_prefix = f"{const.OPTIMIZER}.{const.OPTIMIZER_STEP}{const.OPTIMIZER_SEP}" + + return event_name.replace(op_name_prefix, "") + + def _format_rule_to_pattern(self, op_rule): + """ + Args: + op_rule: like (mul){0,1}-(add|neg){0,2}-dropout-(softmax)* + + Returns: op_pattern like (-mul-){0,1}(-add-|-neg-){0,2}(-dropout-)(-softmax-)* + """ + enable_regex = False + if "(" not in op_rule and ")" not in op_rule: + # op_rule which requires fuzzy matching mush consist of "()" + return op_rule, enable_regex + + enable_regex = True + op_pattern_list = op_rule.split(const.OP_SEP) + format_op_pattern = "" + for op_pattern in op_pattern_list: + matched_res = re.search(r'\((.*?)\)', op_pattern) + + ops_index_range = (matched_res.start() + 1, matched_res.end() - 1) if matched_res else ( + 0, len(op_pattern)) + + op_names = op_pattern[ops_index_range[0]: ops_index_range[1]] + tmp_op_names_record = [] + for op_name in op_names.split("|"): + tmp_op_names_record.append(f"{const.OP_SEP}{op_name.strip(' ')}{const.OP_SEP}") + op_suffix = op_pattern[ops_index_range[1] + 1:] + op_names_format = f"({'|'.join(tmp_op_names_record)}){op_suffix}" + + format_op_pattern += op_names_format + return format_op_pattern, enable_regex diff --git a/profiler/advisor/common/constant.py b/profiler/advisor/common/constant.py index df12fd76d3..664753c724 100644 --- a/profiler/advisor/common/constant.py +++ b/profiler/advisor/common/constant.py @@ -56,8 +56,8 @@ CANN_VERSION_C13 = '7.0.RC1' CANN_VERSION_C15 = '7.0.0' CANN_VERSION_C17 = '8.0.0' SUPPORTED_CANN_VERSION = [CANN_VERSION_C30, CANN_VERSION_C13, CANN_VERSION_C15, CANN_VERSION_C17] -DEFAULT_CANN_VERSION = CANN_VERSION_C15 -ASCEND_PYTORCH_PROFILER = "ascend_pytorch_proflier" +DEFAULT_CANN_VERSION = CANN_VERSION_C17 +ASCEND_PYTORCH_PROFILER = "ascend_pytorch_profiler" MSLITE = "mslite" MSPROF = "msprof" SUPPORTED_PROFILING_TYPE = [ASCEND_PYTORCH_PROFILER, MSLITE, MSPROF] @@ -83,12 +83,20 @@ ADVISOR_LOG_LEVEL = "ADVISOR_LOG_LEVEL" DEFAULT_LOG_LEVEL = "INFO" SUPPORTED_LOG_LEVEL = ["DEBUG", "INFO", "WARNING", "ERROR", "CRITICAL"] +RULE_BUCKET = "RULE-BUCKET" CLOUD_RULE_REGION_CN_NORTH_9 = "cn-north-9" CLOUD_RULE_REGION_CN_NORTH_7 = "cn-north-7" CLOUD_RULE_REGION_CN_SOUTHWEST_2 = "cn-southwest-2" CLOUD_RULE_REGION_LIST = [CLOUD_RULE_REGION_CN_NORTH_7, CLOUD_RULE_REGION_CN_NORTH_9, 
CLOUD_RULE_REGION_CN_SOUTHWEST_2] +INNER_REGION_LIST = [CLOUD_RULE_REGION_CN_NORTH_7] DEFAULT_CLOUD_RULE_REGION = CLOUD_RULE_REGION_CN_SOUTHWEST_2 +HTTP_PREFIXES = "http://" +HTTPS_PREFIXES = "https://" +COMMON_YAML_DIR = "modelarts/solution/ma_advisor_rules/" +COMMON_ENDPOINT_SUFFIX = "obs.{}.myhuaweicloud.com" +INNER_ENDPOINT_SUFFIX= "obs.{}.ulanqab.huawei.com" + AICPU_RULES_YAML_NAME = "aicpu_rules.yaml" FUSSION_PASS_YAML_NAME = "op_fussion_pass.yaml" TIMELINE_FUSION_OPS_YAML_NAME = "timeline_fusion_ops.yaml" diff --git a/profiler/advisor/common/timeline/fusion_ops_db.py b/profiler/advisor/common/timeline/fusion_ops_db.py index 19a86437e0..f37cfe50d1 100644 --- a/profiler/advisor/common/timeline/fusion_ops_db.py +++ b/profiler/advisor/common/timeline/fusion_ops_db.py @@ -1,330 +1,44 @@ -import copy import logging import os import yaml -from profiler.advisor.common import constant as const +from profiler.advisor.common import constant +from profiler.advisor.common.timeline.fusion_ops_rule import OpRule +from profiler.advisor.common.timeline.fusion_ops_rule_handler import TimelineOpRuleHandler from profiler.advisor.utils.log import get_log_level +from profiler.advisor.utils.utils import get_file_path_by_walk logger = logging.getLogger() logger.setLevel(get_log_level()) -class TimelineOpRuleHandler: - """基于线性规划思想保存OpRule,用于局部继承、全局继承等功能""" - - def __init__(self): - self._db_content = None - # 具体生成的timeline规则,key为unique_id - self._all_tmp_timeline_op_rule = {} - # 所有timeline规则的dict集合,key为unique_id - self._all_origin_timeline_op_rule_dict = {} - # 已生成timeline规则的id数组 - self._exist_timeline_op_rule_unique_id_list = [] - - @staticmethod - def _get_local_inherit_id_list(op_rule: dict): - local_inherit_id_list = [] - for _, val in op_rule.items(): - if val.get("inherit_unique_id") is not None: - local_inherit_id_list.append(val.get("inherit_unique_id")) - return local_inherit_id_list - - @staticmethod - def _is_duplicated_element_in_lists(list_a, list_b): - """检查两个数组中是否存在重复的元素,若有任意元素重复,返回True""" - if not isinstance(list_a, list): - list_a = [list_a] - if not isinstance(list_b, list): - list_b = [list_b] - for element in list_a: - if element in list_b: - return True - return False - - def set_db_content(self, db_content): - # 过滤非 dict 格式, 或 dict 中没有定义 unique_id 的数据, 并保存到 _all_origin_timeline_op_rule_dict 中 - self._db_content = copy.deepcopy(db_content) - for rule_dic in self._db_content: - if not isinstance(rule_dic, dict) or rule_dic.get("unique_id") is None: - continue - self._all_origin_timeline_op_rule_dict[rule_dic.get("unique_id")] = rule_dic - if self._all_origin_timeline_op_rule_dict: - self.generate_all_timeline_op_rule() - - def generate_basic_timeline_op_rules(self): - """用于实现获取无全局继承规则, 无全局继承的规则认为是基础版本规则, 默认不会存在局部继承""" - for _, rule_dic in self._all_origin_timeline_op_rule_dict.items(): - if rule_dic.get("inherit_unique_id") is None: - self.add_basic_timeline_op_rule(rule_dic) - - def add_basic_timeline_op_rule(self, rule_dic): - # 若基础规则中存在局部继承的规则,则跳过 - local_inherit_id_list = self._get_local_inherit_id_list(rule_dic.get("operator_rules")) - if local_inherit_id_list: - return - - temp_rule = OpRule() - temp_rule.merge(rule_dic.get("operator_rules")) - - unique_id = rule_dic.get("unique_id") - logger.debug("The rule of version %s is basic rule.", unique_id) - self.add_new_timeline_op_rule(unique_id, temp_rule.tmp_rule) - - def add_empty_timeline_op_rule(self, unique_id): - if self._all_origin_timeline_op_rule_dict.get(unique_id) is None: - self._all_origin_timeline_op_rule_dict[unique_id] 
= {} - tmp_rule = {} - logger.debug("The rule of version %s is empty.", unique_id) - self.add_new_timeline_op_rule(unique_id, tmp_rule) - - def add_new_timeline_op_rule(self, unique_id, tmp_rule): - if unique_id not in self._exist_timeline_op_rule_unique_id_list: - self._exist_timeline_op_rule_unique_id_list.append(unique_id) - self._all_tmp_timeline_op_rule[unique_id] = tmp_rule - logger.debug("The rule of version %s is successfully generated.", unique_id) - - def generate_specified_list_timeline_op_rule(self, specified_unique_id_list, kid_id_list=None): - for specified_unique_id in specified_unique_id_list: - if specified_unique_id in self._exist_timeline_op_rule_unique_id_list: - self.generate_specified_timeline_op_rule(specified_unique_id, kid_id_list) - - def generate_specified_timeline_op_rule(self, specified_unique_id, kid_id_list=None): - """用于实现生成特定版本规则 - - 若不存在相应specified_unique_id的规则、或是已生成、循环继承等情况,将该规则置空并返回 - 规则库文件结构设置为多叉树, 结构决定了不断向下搜索最终应该是从基础版本开始继承, 递归生成, - 直到specified_unique_id规则依赖继承的规则库全部生成完毕, 再生成该指定规则库, 将specified_unique_id的规则库归档 - - 参数: - specified_unique_id: 指定版本规则id - kid_id_list: 子规则id数组, 用于防止循环继承, 如间接继承自身或直接继承自身等情况 - 返回: - None - """ - if kid_id_list is None: - kid_id_list = [] - - # 若该unique_id规则在timeline_fusion_ops.yaml中没有相应的规则, 生成该id规则,置为空 - if self._all_origin_timeline_op_rule_dict.get(specified_unique_id) is None: - logger.warning("The specified version %s does not exist in the rule library. " - "Ensure that the corresponding rule is configured in the YAML file. " - "The version %s is left blank.", - specified_unique_id, - specified_unique_id) - self.add_empty_timeline_op_rule(specified_unique_id) - return - - # 若该unique_id规则已经生成,则无需再次生成 - if specified_unique_id in self._exist_timeline_op_rule_unique_id_list: - logger.warning("The rule has been generated and does not need to be generated again. " - "Check whether unique id %s in the YAML file is duplicate.", - specified_unique_id) - return - - # 若kid_id_list不为空,且间接继承自身,则尝试生成空规则用于继承 - if kid_id_list and self._is_duplicated_element_in_lists(specified_unique_id, kid_id_list): - logger.warning("It cannot be inherited indirectly. 
Ensure that the corresponding rules are correctly " - "configured in the YAML file and leave Version %s blank.", - specified_unique_id) - self.add_empty_timeline_op_rule(specified_unique_id) - return - - rule_dic = self._all_origin_timeline_op_rule_dict.get(specified_unique_id) - if rule_dic is not None: - kid_id_list.append(specified_unique_id) - - global_inherit_id = rule_dic.get("inherit_unique_id") - if global_inherit_id and global_inherit_id not in self._exist_timeline_op_rule_unique_id_list: - logger.debug("The rule of version %s global inherit the rule of version %s", - specified_unique_id, global_inherit_id) - self.generate_specified_timeline_op_rule(global_inherit_id, kid_id_list) - - # 若局部继承的规则未生成, 生成该规则 - local_inherit_id_list = self._get_local_inherit_id_list(rule_dic.get("operator_rules")) - if local_inherit_id_list: - logger.debug("The rule of version %s local inherit the rule of version %s", - specified_unique_id, local_inherit_id_list) - self.generate_specified_list_timeline_op_rule(specified_unique_id_list=local_inherit_id_list, - kid_id_list=kid_id_list) - logger.debug("Start to generate rule of version %s", specified_unique_id) - # 实现全局继承与局部继承 - temp_rule = OpRule(timeline_op_rule_handler=self, - rule=self._all_tmp_timeline_op_rule.get(global_inherit_id)) - temp_rule.merge(rule_dic.get("operator_rules")) - # 将生成的规则归档保存 - self.add_new_timeline_op_rule(specified_unique_id, temp_rule.tmp_rule) - return - logger.error("Failed to generate the rule whose unique_id is %s. Ensure that the rule is configured in " - "the YAML file and the version %s is empty.", specified_unique_id, specified_unique_id) - self.add_empty_timeline_op_rule(specified_unique_id) - - def generate_all_timeline_op_rule(self): - """用于实现获取所有版本规则 - - 查找db_content中的规则库, 规则库文件结构设置为多叉树, 优先生成无继承的基础规则版本 - 循环并生成其他版本, 文件结构决定了不断向下搜索最终应该是从基础版本开始继承, 递归生成,直到全部规则库生成后退出函数 - - 参数: - None - 返回: - None - """ - self.generate_basic_timeline_op_rules() - _unique_id_list = copy.deepcopy(list(self._all_origin_timeline_op_rule_dict.keys())) - for unique_id in _unique_id_list: - if unique_id in self._exist_timeline_op_rule_unique_id_list: - continue - self.generate_specified_timeline_op_rule(unique_id) - - def get_tmp_timeline_op_rule_with_unique_id(self, unique_id): - if unique_id not in self._exist_timeline_op_rule_unique_id_list: - logger.error("The specified unique_id does not exist in the rule library. Ensure that the " - "corresponding rule is configured in the YAML file and the version %s is empty." - "If the value of unique_id is a negative number, the version may not be supported.", - unique_id) - self.add_empty_timeline_op_rule(unique_id) - if unique_id < 0: - logger.error("Advise to use a positive integer as the unique id of rules. " - "Negative numbers: %s are not recommended to use as unique id. 
" - "If specified invalid unique id: %s is used, an empty rule is returned by default.", - unique_id, const.TIMELINE_FUSION_OPS_INVALID_UNIQUE_ID) - return self._all_tmp_timeline_op_rule.get(unique_id) - - -class OpRule: - - def __init__(self, rule=None, timeline_op_rule_handler=None): - if rule is None: - self._tmp_rule = {} - else: - self._tmp_rule = copy.deepcopy(rule) - if timeline_op_rule_handler is None: - self.timeline_op_rule_handler = {} - else: - self.timeline_op_rule_handler = copy.deepcopy(timeline_op_rule_handler) - self._rule = {} - - @property - def tmp_rule(self): - return self._tmp_rule - - @staticmethod - def _format_rule(rule): - """格式化规则函数, 将额外规则格式化为{key,数组list}形式, 使得yaml文件中operator_rules若写成key:str形式也能正常读取""" - format_rule = {} - for key, val in rule.items(): - if not isinstance(val, list): - val = [val] - format_rule[key] = val - return format_rule - - def merge(self, extra_rule): - """合并函数, 将已有规则库与额外规则合并, 若无继承则已有规则库应为空""" - for key, val in extra_rule.items(): - for func, op_rules in val.items(): - try: - getattr(self, f"{func}")(key, op_rules) - except AttributeError: - logger.error("Undefined field and function name. Ensure that %s is correct in the rule " - "library.", func) - - def get_final_rules(self): - """获取最终的规则库""" - self._restore_rule() - return self._rule - - def add(self, key, add_rules: dict): - """新增函数, 新增已有规则库不存在的额外规则""" - if add_rules is None: - return - if self._tmp_rule.get(key) is None: - self._tmp_rule[key] = {} - format_add_rule = self._format_rule(add_rules) - for add_key, add_val in format_add_rule.items(): - logger.debug("add: %s: %s", add_key, add_val) - if add_key not in self._tmp_rule: - self._tmp_rule[key][add_key] = add_val - else: - logger.warning("This key has been written to the rule, " - "%s: %s should be written in the overwrite section", add_key, add_val) - self._tmp_rule[key][add_key].update(add_val) - - def overwrite(self, key, overwrite_rules: dict): - """重写函数, 重写已有规则库中已经存在的规则""" - if overwrite_rules is None: - return - if self._tmp_rule.get(key) is None: - self._tmp_rule[key] = {} - format_overwrite_rules = self._format_rule(overwrite_rules) - for overwrite_key, overwrite_val in format_overwrite_rules.items(): - logger.debug("overwrite: %s: %s", overwrite_key, overwrite_val) - if overwrite_key not in self._tmp_rule: - logger.warning("This key is not written to the rule. " - "%s: %s should be written in the add section", overwrite_key, overwrite_val) - self._tmp_rule[key][overwrite_key] = overwrite_val - else: - self._tmp_rule[key][overwrite_key].update(overwrite_val) - - def exclude(self, key, exclude_rules: list): - """除外函数, 将已有规则库已有的规则除外删除""" - if exclude_rules is None: - return - for exclude_key in exclude_rules: - logger.debug("exclude: %s", exclude_key) - if isinstance(exclude_key, str): - if exclude_key not in self._tmp_rule[key]: - logger.warning("This key is not written to the rule. " - "do not need to exclude: %s.", exclude_key) - continue - self._tmp_rule[key].pop(exclude_key) - else: - logger.warning("Error type rule in exclude: %s", exclude_key) - - def inherit_unique_id(self, key, inherit_unique_id): - """局部继承函数, 将规则库中指定unique_id版本覆盖指定位置""" - result_rule = self.timeline_op_rule_handler.get_tmp_timeline_op_rule_with_unique_id(inherit_unique_id) - if result_rule is not None and result_rule.get(key) is not None: - self._tmp_rule[key] = copy.deepcopy(result_rule.get(key)) - return - logger.error("Rule library version %s does not exist. 
", inherit_unique_id) - - def _restore_rule(self): - for key, op_api_map in self._tmp_rule.items(): - self._rule[key] = [{op_combined: api} for op_combined, api in op_api_map.items()] - +def init_timeline_ops_db(cann_version=None, torch_version=None): + logger.debug("init operators database") -def get_file_path_by_walk(root, filename): - file_path = "" - for root, _, files in os.walk(root, topdown=True): - for name in files: - if name == filename: - file_path = os.path.join(root, name) - return file_path - return file_path + return FusionOperatorDB(cann_version=cann_version, torch_version=torch_version) def get_timeline_fusion_ops_yaml_path(): # 环境变量 ADVISOR_RULE_PATH 不为空且该路径存在, os.walk遍历其下文件, 若存在相应的规则文件则返回路径 - advisor_rule_path = os.getenv(const.ADVISOR_RULE_PATH) + advisor_rule_path = os.getenv(constant.ADVISOR_RULE_PATH) if advisor_rule_path and os.path.exists(advisor_rule_path): - specified_file_path = get_file_path_by_walk(advisor_rule_path, const.TIMELINE_FUSION_OPS_YAML_NAME) + specified_file_path = get_file_path_by_walk(advisor_rule_path, constant.TIMELINE_FUSION_OPS_YAML_NAME) if len(specified_file_path.strip()) and os.path.exists(specified_file_path): logger.debug("Successfully find The %s file which is specified by the environment variable: %s.", - specified_file_path, const.ADVISOR_RULE_PATH) + specified_file_path, constant.ADVISOR_RULE_PATH) return specified_file_path logger.warning("The %s does not exist in path: %s. Try to use cloud or default local YAML file.", - const.TIMELINE_FUSION_OPS_YAML_NAME, os.path.normpath(advisor_rule_path)) + constant.TIMELINE_FUSION_OPS_YAML_NAME, os.path.normpath(advisor_rule_path)) # 检查云文件默认保存路径文件夹下是否存在相应文件, 默认路径 ~/rules/cloud/ - cloud_file_path = os.path.join(os.path.expanduser("~"), const.CLOUD_RULE_PATH, const.TIMELINE_FUSION_OPS_YAML_NAME) + cloud_file_path = os.path.join(os.path.expanduser("~"), constant.CLOUD_RULE_PATH, constant.TIMELINE_FUSION_OPS_YAML_NAME) if os.path.exists(cloud_file_path): - logger.debug("Successfully find The cloud %s file in %s.", const.TIMELINE_FUSION_OPS_YAML_NAME, + logger.debug("Successfully find The cloud %s file in %s.", constant.TIMELINE_FUSION_OPS_YAML_NAME, cloud_file_path) return cloud_file_path # 检查本地默认文件 local_file_path = os.path.join(os.path.dirname(os.path.dirname(os.path.dirname(os.path.realpath(__file__)))), - const.DEFAULT_RULE_PATH, const.TIMELINE_FUSION_OPS_YAML_NAME) + constant.DEFAULT_RULE_PATH, constant.TIMELINE_FUSION_OPS_YAML_NAME) if not os.path.exists(local_file_path): # 若本地默认文件不存在, 则log异常信息并 logger.error("The default local YAML file does not exist. 
Please check the YAML file in the default path %s.", @@ -337,8 +51,8 @@ class FusionOperatorDB: def __init__(self, file_path=None, cann_version=None, torch_version=None): self.timeline_fusion_ops_yaml_path = os.path.normpath(get_timeline_fusion_ops_yaml_path()) - self.cann_version = cann_version or const.DEFAULT_CANN_VERSION - self.torch_version = torch_version or const.DEFAULT_TORCH_VERSION + self.cann_version = cann_version or constant.DEFAULT_CANN_VERSION + self.torch_version = torch_version or constant.DEFAULT_TORCH_VERSION self._supported_version_dict = {} @@ -379,9 +93,9 @@ class FusionOperatorDB: return self._optimizer_op_api_map def get_fusion_operator_with_unique_id(self, unique_id): - if unique_id == const.TIMELINE_FUSION_OPS_INVALID_UNIQUE_ID: + if unique_id == constant.TIMELINE_FUSION_OPS_INVALID_UNIQUE_ID: logger.warning("The specified unique id: %s is invalid.Please check whether the rule of the unique id " - "exists and modify the rule.", const.TIMELINE_FUSION_OPS_INVALID_UNIQUE_ID) + "exists and modify the rule.", constant.TIMELINE_FUSION_OPS_INVALID_UNIQUE_ID) return {} result_tmp_rule = self.timeline_op_rule_handler.get_tmp_timeline_op_rule_with_unique_id(unique_id) result_op_rule = OpRule(result_tmp_rule) @@ -411,7 +125,7 @@ class FusionOperatorDB: def _is_version_supported(self, db_content): """校验当前版本是否被规则库中的版本支持, 保存版本支持信息数组, 按数组或字符串的可变方式保存""" - if db_content is None : + if db_content is None: logger.warning( "The rule library is empty. Check the rule library file: %s", self.timeline_fusion_ops_yaml_path @@ -455,18 +169,18 @@ class FusionOperatorDB: def _is_version_supported_in_supported_version_dict(self, cann_version=None, torch_version=None): """校验当前版本是否存在在规则库中的版本支持字典中""" for _, supported_version in self._supported_version_dict.items(): - if self._is_version_supported_in_version(supported_version, cann_version, torch_version): + if self._is_version_supported_in_versions(supported_version, cann_version, torch_version): return True return False def _get_unique_id_in_supported_version_dict(self, cann_version=None, torch_version=None) -> int: """校验当前版本是否存在在规则库中的版本支持字典中, 在使用前请检查是否支持该版本""" for key_unique_id, supported_version in self._supported_version_dict.items(): - if self._is_version_supported_in_version(supported_version, cann_version, torch_version): + if self._is_version_supported_in_versions(supported_version, cann_version, torch_version): return key_unique_id - return const.TIMELINE_FUSION_OPS_INVALID_UNIQUE_ID + return constant.TIMELINE_FUSION_OPS_INVALID_UNIQUE_ID - def _is_version_supported_in_version(self, supported_version, cann_version=None, torch_version=None): + def _is_version_supported_in_versions(self, supported_version, cann_version=None, torch_version=None): """校验当前cann版本和torch版本是否存在在规则库中的版本支持数组的元素中""" cann_version_list = supported_version[0] if not isinstance(cann_version_list, list): @@ -485,9 +199,9 @@ class FusionOperatorDB: def _parse_db(self): """生成输出的规则库""" - self._parse(const.ATEN) - self._parse(const.DEQUEUE) - self._parse(const.OPTIMIZER) + self._parse(constant.ATEN) + self._parse(constant.DEQUEUE) + self._parse(constant.OPTIMIZER) def _parse(self, mode): """生成输出的规则库中指定部分, 如aten, Optimizer等""" @@ -521,7 +235,7 @@ class FusionOperatorDB: if not os.path.exists(file_path): logger.warning("Path: '%s' does not exist, please specific existed path of " "fusion operators yaml file by setting env '%s'", - os.path.abspath(file_path), const.ADVISOR_RULE_PATH) + os.path.abspath(file_path), constant.ADVISOR_RULE_PATH) self.is_empty = True return {} diff 
--git a/profiler/advisor/common/timeline/fusion_ops_rule.py b/profiler/advisor/common/timeline/fusion_ops_rule.py new file mode 100644 index 0000000000..deee68edb9 --- /dev/null +++ b/profiler/advisor/common/timeline/fusion_ops_rule.py @@ -0,0 +1,110 @@ +# Copyright (c) Huawei Technologies Co., Ltd. 2024-2024. All rights reserved. +import copy +import logging + +from profiler.advisor.utils.log import get_log_level + +logger = logging.getLogger() +logger.setLevel(get_log_level()) + + +class OpRule: + + def __init__(self, rule=None, timeline_op_rule_handler=None): + if rule is None: + self._tmp_rule = {} + else: + self._tmp_rule = copy.deepcopy(rule) + if timeline_op_rule_handler is None: + self.timeline_op_rule_handler = {} + else: + self.timeline_op_rule_handler = copy.deepcopy(timeline_op_rule_handler) + self._rule = {} + + @property + def tmp_rule(self): + return self._tmp_rule + + @staticmethod + def _format_rule(rule): + """格式化规则函数, 将额外规则格式化为{key,数组list}形式, 使得yaml文件中operator_rules若写成key:str形式也能正常读取""" + format_rule = {} + for key, val in rule.items(): + if not isinstance(val, list): + val = [val] + format_rule[key] = val + return format_rule + + def merge(self, extra_rule): + """合并函数, 将已有规则库与额外规则合并, 若无继承则已有规则库应为空""" + for key, val in extra_rule.items(): + for func, op_rules in val.items(): + try: + getattr(self, f"{func}")(key, op_rules) + except AttributeError: + logger.error("Undefined field and function name. Ensure that %s is correct in the rule " + "library.", func) + + def get_final_rules(self): + """获取最终的规则库""" + self._restore_rule() + return self._rule + + def add(self, key, add_rules: dict): + """新增函数, 新增已有规则库不存在的额外规则""" + if add_rules is None: + return + if self._tmp_rule.get(key) is None: + self._tmp_rule[key] = {} + format_add_rule = self._format_rule(add_rules) + for add_key, add_val in format_add_rule.items(): + logger.debug("add: %s: %s", add_key, add_val) + if add_key not in self._tmp_rule: + self._tmp_rule[key][add_key] = add_val + else: + logger.warning("This key has been written to the rule, " + "%s: %s should be written in the overwrite section", add_key, add_val) + self._tmp_rule[key][add_key].update(add_val) + + def overwrite(self, key, overwrite_rules: dict): + """重写函数, 重写已有规则库中已经存在的规则""" + if overwrite_rules is None: + return + if self._tmp_rule.get(key) is None: + self._tmp_rule[key] = {} + format_overwrite_rules = self._format_rule(overwrite_rules) + for overwrite_key, overwrite_val in format_overwrite_rules.items(): + logger.debug("overwrite: %s: %s", overwrite_key, overwrite_val) + if overwrite_key not in self._tmp_rule: + logger.warning("This key is not written to the rule. " + "%s: %s should be written in the add section", overwrite_key, overwrite_val) + self._tmp_rule[key][overwrite_key] = overwrite_val + else: + self._tmp_rule[key][overwrite_key].update(overwrite_val) + + def exclude(self, key, exclude_rules: list): + """除外函数, 将已有规则库已有的规则除外删除""" + if exclude_rules is None: + return + for exclude_key in exclude_rules: + logger.debug("exclude: %s", exclude_key) + if isinstance(exclude_key, str): + if exclude_key not in self._tmp_rule[key]: + logger.warning("This key is not written to the rule. 
" + "do not need to exclude: %s.", exclude_key) + continue + self._tmp_rule[key].pop(exclude_key) + else: + logger.warning("Error type rule in exclude: %s", exclude_key) + + def inherit_unique_id(self, key, inherit_unique_id): + """局部继承函数, 将规则库中指定unique_id版本覆盖指定位置""" + result_rule = self.timeline_op_rule_handler.get_tmp_timeline_op_rule_with_unique_id(inherit_unique_id) + if result_rule is not None and result_rule.get(key) is not None: + self._tmp_rule[key] = copy.deepcopy(result_rule.get(key)) + return + logger.error("Rule library version %s does not exist. ", inherit_unique_id) + + def _restore_rule(self): + for key, op_api_map in self._tmp_rule.items(): + self._rule[key] = [{op_combined: api} for op_combined, api in op_api_map.items()] diff --git a/profiler/advisor/common/timeline/fusion_ops_rule_handler.py b/profiler/advisor/common/timeline/fusion_ops_rule_handler.py new file mode 100644 index 0000000000..b0558cca6d --- /dev/null +++ b/profiler/advisor/common/timeline/fusion_ops_rule_handler.py @@ -0,0 +1,193 @@ +# Copyright (c) Huawei Technologies Co., Ltd. 2024-2024. All rights reserved. +import copy +import logging + +from profiler.advisor.common import constant +from profiler.advisor.common.timeline.fusion_ops_rule import OpRule +from profiler.advisor.utils.log import get_log_level + +logger = logging.getLogger() +logger.setLevel(get_log_level()) + + +class TimelineOpRuleHandler: + """基于线性规划思想保存OpRule,用于局部继承、全局继承等功能""" + + def __init__(self): + self._db_content = None + # 具体生成的timeline规则,key为unique_id + self._all_tmp_timeline_op_rule = {} + # 所有timeline规则的dict集合,key为unique_id + self._all_origin_timeline_op_rule_dict = {} + # 已生成timeline规则的id数组 + self._exist_timeline_op_rule_unique_id_list = [] + + @staticmethod + def _get_local_inherit_id_list(op_rule: dict): + local_inherit_id_list = [] + for _, val in op_rule.items(): + if val.get("inherit_unique_id") is not None: + local_inherit_id_list.append(val.get("inherit_unique_id")) + return local_inherit_id_list + + @staticmethod + def _is_duplicated_element_in_lists(list_a, list_b): + """检查两个数组中是否存在重复的元素,若有任意元素重复,返回True""" + if not isinstance(list_a, list): + list_a = [list_a] + if not isinstance(list_b, list): + list_b = [list_b] + # 将两个数组合并为一个列表,使用集合(set)判断列表中是否存在重复元素 + combined_list = list_a + list_b + if len(combined_list) != len(set(combined_list)): + return True + return False + + def set_db_content(self, db_content): + # 过滤非 dict 格式, 或 dict 中没有定义 unique_id 的数据, 并保存到 _all_origin_timeline_op_rule_dict 中 + self._db_content = copy.deepcopy(db_content) + for rule_dic in self._db_content: + if not isinstance(rule_dic, dict) or rule_dic.get("unique_id") is None: + continue + self._all_origin_timeline_op_rule_dict[rule_dic.get("unique_id")] = rule_dic + if self._all_origin_timeline_op_rule_dict: + self.generate_all_timeline_op_rule() + + def generate_basic_timeline_op_rules(self): + """用于实现获取无全局继承规则, 无全局继承的规则认为是基础版本规则, 默认不会存在局部继承""" + for _, rule_dic in self._all_origin_timeline_op_rule_dict.items(): + if rule_dic.get("inherit_unique_id") is None: + self.add_basic_timeline_op_rule(rule_dic) + + def add_basic_timeline_op_rule(self, rule_dic): + # 若基础规则中存在局部继承的规则,则跳过 + local_inherit_id_list = self._get_local_inherit_id_list(rule_dic.get("operator_rules")) + if local_inherit_id_list: + return + + temp_rule = OpRule() + temp_rule.merge(rule_dic.get("operator_rules")) + + unique_id = rule_dic.get("unique_id") + logger.debug("The rule of version %s is basic rule.", unique_id) + self.add_new_timeline_op_rule(unique_id, temp_rule.tmp_rule) + + 
def add_empty_timeline_op_rule(self, unique_id): + if self._all_origin_timeline_op_rule_dict.get(unique_id) is None: + self._all_origin_timeline_op_rule_dict[unique_id] = {} + tmp_rule = {} + logger.debug("The rule of version %s is empty.", unique_id) + self.add_new_timeline_op_rule(unique_id, tmp_rule) + + def add_new_timeline_op_rule(self, unique_id, tmp_rule): + if unique_id not in self._exist_timeline_op_rule_unique_id_list: + self._exist_timeline_op_rule_unique_id_list.append(unique_id) + self._all_tmp_timeline_op_rule[unique_id] = tmp_rule + logger.debug("The rule of version %s is successfully generated.", unique_id) + + def generate_specified_list_timeline_op_rule(self, specified_unique_id_list, kid_id_list=None): + for specified_unique_id in specified_unique_id_list: + if specified_unique_id in self._exist_timeline_op_rule_unique_id_list: + self.generate_specified_timeline_op_rule(specified_unique_id, kid_id_list) + + def generate_specified_timeline_op_rule(self, specified_unique_id, kid_id_list=None): + """用于实现生成特定版本规则 + + 若不存在相应specified_unique_id的规则、或是已生成、循环继承等情况,将该规则置空并返回 + 规则库文件结构设置为多叉树, 结构决定了不断向下搜索最终应该是从基础版本开始继承, 递归生成, + 直到specified_unique_id规则依赖继承的规则库全部生成完毕, 再生成该指定规则库, 将specified_unique_id的规则库归档 + + 参数: + specified_unique_id: 指定版本规则id + kid_id_list: 子规则id数组, 用于防止循环继承, 如间接继承自身或直接继承自身等情况 + 返回: + None + """ + if kid_id_list is None: + kid_id_list = [] + + # 若该unique_id规则在timeline_fusion_ops.yaml中没有相应的规则, 生成该id规则,置为空 + if self._all_origin_timeline_op_rule_dict.get(specified_unique_id) is None: + logger.warning("The specified version %s does not exist in the rule library. " + "Ensure that the corresponding rule is configured in the YAML file. " + "The version %s is left blank.", + specified_unique_id, + specified_unique_id) + self.add_empty_timeline_op_rule(specified_unique_id) + return + + # 若该unique_id规则已经生成,则无需再次生成 + if specified_unique_id in self._exist_timeline_op_rule_unique_id_list: + logger.warning("The rule has been generated and does not need to be generated again. " + "Check whether unique id %s in the YAML file is duplicate.", + specified_unique_id) + return + + # 若kid_id_list不为空,且间接继承自身,则尝试生成空规则用于继承 + if kid_id_list and self._is_duplicated_element_in_lists(specified_unique_id, kid_id_list): + logger.warning("It cannot be inherited indirectly. 
Ensure that the corresponding rules are correctly " + "configured in the YAML file and leave Version %s blank.", + specified_unique_id) + self.add_empty_timeline_op_rule(specified_unique_id) + return + + rule_dic = self._all_origin_timeline_op_rule_dict.get(specified_unique_id) + if rule_dic is not None: + kid_id_list.append(specified_unique_id) + + global_inherit_id = rule_dic.get("inherit_unique_id") + if global_inherit_id and global_inherit_id not in self._exist_timeline_op_rule_unique_id_list: + logger.debug("The rule of version %s global inherit the rule of version %s", + specified_unique_id, global_inherit_id) + self.generate_specified_timeline_op_rule(global_inherit_id, kid_id_list) + + # 若局部继承的规则未生成, 生成该规则 + local_inherit_id_list = self._get_local_inherit_id_list(rule_dic.get("operator_rules")) + if local_inherit_id_list: + logger.debug("The rule of version %s local inherit the rule of version %s", + specified_unique_id, local_inherit_id_list) + self.generate_specified_list_timeline_op_rule(specified_unique_id_list=local_inherit_id_list, + kid_id_list=kid_id_list) + logger.debug("Start to generate rule of version %s", specified_unique_id) + # 实现全局继承与局部继承 + temp_rule = OpRule(timeline_op_rule_handler=self, + rule=self._all_tmp_timeline_op_rule.get(global_inherit_id)) + temp_rule.merge(rule_dic.get("operator_rules")) + # 将生成的规则归档保存 + self.add_new_timeline_op_rule(specified_unique_id, temp_rule.tmp_rule) + return + logger.error("Failed to generate the rule whose unique_id is %s. Ensure that the rule is configured in " + "the YAML file and the version %s is empty.", specified_unique_id, specified_unique_id) + self.add_empty_timeline_op_rule(specified_unique_id) + + def generate_all_timeline_op_rule(self): + """用于实现获取所有版本规则 + + 查找db_content中的规则库, 规则库文件结构设置为多叉树, 优先生成无继承的基础规则版本 + 循环并生成其他版本, 文件结构决定了不断向下搜索最终应该是从基础版本开始继承, 递归生成,直到全部规则库生成后退出函数 + + 参数: + None + 返回: + None + """ + self.generate_basic_timeline_op_rules() + _unique_id_list = copy.deepcopy(list(self._all_origin_timeline_op_rule_dict.keys())) + for unique_id in _unique_id_list: + if unique_id in self._exist_timeline_op_rule_unique_id_list: + continue + self.generate_specified_timeline_op_rule(unique_id) + + def get_tmp_timeline_op_rule_with_unique_id(self, unique_id): + if unique_id not in self._exist_timeline_op_rule_unique_id_list: + logger.error("The specified unique_id does not exist in the rule library. Ensure that the " + "corresponding rule is configured in the YAML file and the version %s is empty." + "If the value of unique_id is a negative number, the version may not be supported.", + unique_id) + self.add_empty_timeline_op_rule(unique_id) + if unique_id < 0: + logger.error("Advise to use a positive integer as the unique id of rules. " + "Negative numbers: %s are not recommended to use as unique id. 
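A minimal usage sketch of the handler above, assuming a db_content list like the one sketched earlier; the resolved value has the OpRule.tmp_rule shape, i.e. {rule_key: {op_combined: api}}.

    handler = TimelineOpRuleHandler()
    handler.set_db_content(example_db_content)   # builds basic versions first, then inherited ones

    # Version 2 is resolved by copying version 1 (global inherit) and merging its own
    # operator_rules on top; unknown or negative ids fall back to an empty rule.
    resolved_rule = handler.get_tmp_timeline_op_rule_with_unique_id(2)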
" + "If specified invalid unique id: %s is used, an empty rule is returned by default.", + unique_id, constant.TIMELINE_FUSION_OPS_INVALID_UNIQUE_ID) + return self._all_tmp_timeline_op_rule.get(unique_id) diff --git a/profiler/advisor/config/config.ini b/profiler/advisor/config/config.ini index b8f6703685..7185a6b3bf 100644 --- a/profiler/advisor/config/config.ini +++ b/profiler/advisor/config/config.ini @@ -9,8 +9,8 @@ tune_ops_file = operator_tuning_file.cfg [THRESHOLD] # operator_bound_ratio: (mte, cube, vector, scalar) ratio greater than this value will be checked in operator_bound_checker operator_bound_ratio = 0.8 -[RULE] +[RULE-BUCKET] # region : URL of different regions where can download rule yaml file -cn-north-9 = https://cnnorth9-modelarts-sdk.obs.cn-north-9.myhuaweicloud.com/modelarts/solution/ma_advisor_rules/ -cn-southwest-2 = https://cnsouthwest2-modelarts-sdk.obs.cn-southwest-2.myhuaweicloud.com/modelarts/solution/ma_advisor_rules/ -cn-north-7 = https://cnnorth7-modelarts-sdk.obs.cn-north-7.ulanqab.huawei.com/modelarts/solution/ma_advisor_rules/ \ No newline at end of file +cn-north-9 = cnnorth9-modelarts-sdk +cn-southwest-2 = cnsouthwest2-modelarts-sdk +cn-north-7 = cnnorth7-modelarts-sdk \ No newline at end of file diff --git a/profiler/advisor/dataset/cluster/cluster_dataset.py b/profiler/advisor/dataset/cluster/cluster_dataset.py index ee8b3563b7..a2d91377cc 100644 --- a/profiler/advisor/dataset/cluster/cluster_dataset.py +++ b/profiler/advisor/dataset/cluster/cluster_dataset.py @@ -2,6 +2,7 @@ import logging import os +from profiler.advisor.dataset.dataset import Dataset from profiler.advisor.utils.utils import singleton from profiler.cluster_analyse.common_func.file_manager import FileManager from profiler.advisor.common import constant as const @@ -13,10 +14,10 @@ from profiler.advisor.dataset.cluster.cluster_step_trace_time_bean import Cluste logger = logging.getLogger() -class ClusterDataset: +class ClusterDataset(Dataset): - def __init__(self, collection_path, **kwargs) -> None: - self.collection_path = os.path.realpath(collection_path) + def __init__(self, collection_path, data: dict, **kwargs) -> None: + super().__init__(collection_path, data) if not self.is_cluster_analysis_output_exist(): self.cluster_analyze() @@ -64,14 +65,14 @@ class ClusterDataset: class ClusterStepTraceTimeDataSet(ClusterDataset): RANK = "rank" - def __init__(self, collection_path: str, kwargs: dict = None): - super().__init__(collection_path) + def __init__(self, collection_path: str, data: dict, kwargs: dict = None): self._step_dict = defaultdict() - self.parse() + super().__init__(collection_path, data) - def parse(self): + def _parse(self): step_data = self.load_csv_data(const.CLUSTER_STEP_TIME_CSV, ClusterStepTraceTimeBean) self._step_dict = self.formate_data(step_data) + return True def formate_data(self, step_data: list): step_dict = defaultdict(lambda: [0, 0, 0]) @@ -100,15 +101,14 @@ class ClusterCommunicationDataSet(ClusterDataset): SDMA = "SDMA" RDMA = "RDMA" - def __init__(self, collection_path: str, kwargs: dict = None): - super().__init__(collection_path) + def __init__(self, collection_path: str, data: dict, kwargs: dict = None): self.rank_bw_dict = defaultdict(lambda: { self.RDMA_TIME_MS: 0, self.RDMA_SIZE_MB: 0, self.SDMA_TIME_MS: 0, self.SDMA_SIZE_MB: 0, }) - self.parse() + super().__init__(collection_path, data) @staticmethod def compute_ratio(dividend: float, divisor: float): @@ -117,7 +117,7 @@ class ClusterCommunicationDataSet(ClusterDataset): else: return 
round(dividend / divisor, 4) - def parse(self): + def _parse(self): communication_json = self.load_json_data(const.CLUSTER_COMM_JSON) self.process(communication_json) diff --git a/profiler/advisor/dataset/dataset.py b/profiler/advisor/dataset/dataset.py new file mode 100644 index 0000000000..7f1e40a38b --- /dev/null +++ b/profiler/advisor/dataset/dataset.py @@ -0,0 +1,38 @@ +""" +dataset module +""" +import logging +import os + +from profiler.advisor.config.config import Config + +logger = logging.getLogger() + + +class Dataset: + """ + :param collection_path: dataSet absolute path + dataset base class + """ + + def __init__(self, collection_path, data=None) -> None: + if data is None: + data = {} + self.collection_path = os.path.abspath(os.path.join(Config().work_path, collection_path)) + logger.debug("init %s with %s", self.__class__.__name__, self.collection_path) + if self._parse(): + key = self.get_key() + if key not in data: + data[key] = [] + data[key].append(self) + + def _parse(self): + return None + + @classmethod + def get_key(cls): + """ + get key of dataset + :return: key + """ + return cls.__name__.rsplit('.', maxsplit=1)[-1] diff --git a/profiler/advisor/dataset/timeline_event_dataset.py b/profiler/advisor/dataset/timeline_event_dataset.py index c1134a9784..9b4c102dff 100644 --- a/profiler/advisor/dataset/timeline_event_dataset.py +++ b/profiler/advisor/dataset/timeline_event_dataset.py @@ -2,6 +2,7 @@ import logging from typing import List import ijson +from profiler.advisor.dataset.dataset import Dataset from tqdm import tqdm from profiler.advisor.common import constant as const @@ -13,17 +14,17 @@ logger = logging.getLogger() @singleton -class TimelineEventDataset: +class TimelineEventDataset(Dataset): - def __init__(self, root_dir, **kwargs) -> None: + def __init__(self, collection_path, data: dict, **kwargs) -> None: self._ops_with_task_type = {} self._ops_with_stack = {} self._torch_to_npu = {} self._acl_to_npu = set() self._aten: List[str] = [] self._optimizer: List[str] = [] - self.timeline_dir = root_dir - self.timeline_data_list = get_file_path_from_directory(root_dir, lambda file: file.endswith("trace_view.json")) + self.timeline_dir = collection_path + self.timeline_data_list = get_file_path_from_directory(collection_path, lambda file: file.endswith("trace_view.json")) self.dataset_len = None self.analysis_mode = kwargs.get("analysis_mode") self.task_type = kwargs.get("task_type") @@ -34,13 +35,14 @@ class TimelineEventDataset: logger.info("Load fusion operators database for cann version '%s' and torch version '%s'", self.cann_version, self.torch_version) - self.parse() + super().__init__(collection_path, data) if self.analysis_mode in ["op_stack", "all"]: self._task_op_names = list(set([event_key.split("-")[0] for event_key in self._ops_with_task_type.keys()])) self._post_process() + @property def ops_with_stack(self): return self._ops_with_stack @@ -69,23 +71,15 @@ class TimelineEventDataset: def aten(self): return self._aten - @classmethod - def get_key(cls): - """ - get key of dataset - :return: key - """ - return cls.__module__.rsplit('.', maxsplit=1)[-1] - - def parse(self): + def _parse(self): if len(self.timeline_data_list) == 0: logger.warning("Please ensure trace_view.json in %s, skip timeline analysis.", self.timeline_dir) return False if len(self.timeline_data_list) > 1: - logger.warning("Please ensure only one trace_view.json in %s, skip timeline analysis.", self.timeline_dir) - return False + logger.warning("Please ensure only one trace_view.json in 
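A minimal sketch of the contract a concrete dataset has with the new Dataset base class above; the subclass and path are hypothetical, and Config().work_path is assumed to be initialised.

    from profiler.advisor.dataset.dataset import Dataset

    class DemoDataset(Dataset):
        """Hypothetical dataset: only _parse() needs to be overridden."""

        def _parse(self):
            self.records = ["demo"]   # placeholder parsing result
            # Returning a truthy value makes __init__ register this instance in the
            # shared `data` dict under get_key(), i.e. the class name "DemoDataset".
            return True

    datasets = {}
    DemoDataset("path/to/profiling", data=datasets)   # hypothetical collection path
    assert DemoDataset.get_key() in datasets          # analyzers later look it up by this key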
%s, there will analysis first timeline profiling data.", self.timeline_dir) + self.timeline_data_list = [self.timeline_data_list[0]] result = self.parse_data_with_generator(self._add_event) diff --git a/profiler/advisor/interface/interface.py b/profiler/advisor/interface/interface.py index 19da350a02..156922f4d1 100644 --- a/profiler/advisor/interface/interface.py +++ b/profiler/advisor/interface/interface.py @@ -1,47 +1,44 @@ import os -from profiler.advisor.analyzer.scheduling.fusion_ops.fusion_ops_analyzer import TimelineFusionOpsAnalyzer -from profiler.advisor.analyzer.overall.overall_analyzer import OverallSummaryAnalyzer -from profiler.advisor.dataset.timeline_event_dataset import TimelineEventDataset +from profiler.advisor.analyzer.schedule.fusion_ops.fusion_ops_analyzer import TimelineFusionOpsAnalyzer from profiler.advisor.utils.utils import Timer -from profiler.advisor.result.result import OptimizeResult from profiler.advisor.analyzer.cluster.slow_rank_analyser import SlowRankAnalyzer from profiler.advisor.analyzer.cluster.slow_link_analyser import SlowLinkAnalyzer class Interface: - supported_analysiser = { - "computing": [], - "scheduling": [TimelineFusionOpsAnalyzer], + supported_analyzer = { + "schedule": [TimelineFusionOpsAnalyzer], + "computation": [], "communication": [], "overall": [], "dataloader": [], "cluster": [SlowRankAnalyzer, SlowLinkAnalyzer] } - all_dimension = supported_analysiser.keys() + all_dimension = supported_analyzer.keys() def __init__(self, **kwargs): self.collection_path = os.path.realpath(kwargs.get("profiling_path")) @staticmethod - def get_analyzer(dimension, is_inference=False): - return Interface.supported_analysiser.get(dimension, []) + def get_analyzer(dimension): + return Interface.supported_analyzer.get(dimension, []) def get_result(self: any, dimension: str, render_html=False, **kwargs): """ :Param mode: affinity apis, ai cpu and so on. 
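A usage sketch of the refactored Interface shown above, mirroring what _analyze() in profiler/cli/analyze_cli.py does; the profiling path is a placeholder.

    from profiler.advisor.interface.interface import Interface

    interface = Interface(profiling_path="/path/to/profiling")    # hypothetical path
    for dimension in Interface.all_dimension:                     # schedule, computation, communication, ...
        # render_html=True on the last dimension writes att_advisor_<timestamp>.html
        results = interface.get_result(dimension, render_html=False)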
""" result_list = [] - analysiser_list = self.get_analyzer(dimension, kwargs.get("is_inference", False)) - for idx, clss in enumerate(analysiser_list): + analyzer_list = self.get_analyzer(dimension) + for idx, clss in enumerate(analyzer_list): if clss and callable(clss): - analysiser = clss(collection_path = self.collection_path, **kwargs) - result_list.append(analysiser.optimize()) - if render_html and idx == len(analysiser_list) - 1: - if hasattr(analysiser, "html_render"): - analysiser.html_render.render_html() - analysiser.html_render.save_to_file(f'att_advisor_{Timer().strftime}.html') + analyzer = clss(collection_path = self.collection_path, **kwargs) + result_list.append(analyzer.optimize(**kwargs)) + if render_html and idx == len(analyzer_list) - 1: + if hasattr(analyzer, "html_render"): + analyzer.html_render.render_html() + analyzer.html_render.save_to_file(f'att_advisor_{Timer().strftime}.html') return result_list diff --git a/profiler/advisor/utils/utils.py b/profiler/advisor/utils/utils.py index d7837e1e40..1a4444f1ec 100644 --- a/profiler/advisor/utils/utils.py +++ b/profiler/advisor/utils/utils.py @@ -1,499 +1,549 @@ -import json -import logging -import multiprocessing as mp -import os -import queue -import re -import stat -import time -import traceback -import types -from functools import wraps -from typing import Any, Set - -import click -import requests -from requests.adapters import HTTPAdapter -from tqdm import tqdm - -from profiler.advisor.common import constant as const -from profiler.advisor.common.timeline.fusion_ops_db import FusionOperatorDB -from profiler.advisor.common.version_control import VersionControl -from profiler.advisor.utils.log import init_logger, get_log_level - -logger = logging.getLogger() -logger.setLevel(get_log_level()) -permission_warned: Set = set() - - -def ignore_warning(exception: Exception = None): - return exception - - -class ContextObject(object): - def __init__(self): - self._debug = False - - def set_debug(self, debug=False): - self._debug = debug - - @property - def debug_mode(self): - return self._debug - - -def debug_option(f): - return click.option('--debug', '-D', - is_flag=True, - expose_value=False, - is_eager=True, - callback=init_logger, - help="Debug Mode. Shows full stack trace when error occurs.")(f) - - -def singleton(cls): - """ - :param cls: any class - :return: singleton handle - """ - _instance = {} - - def _singleton(*args: any, **kw: any) -> any: - if cls not in _instance: - _instance[cls] = cls(*args, **kw) - return _instance.get(cls) - - return _singleton - - -def lazy_property(func): - """ - Lazy loading of class attributes. - which is calculated only once when it is called for the first time, - and will not be repeated for each call after that. 
- """ - attr_name = "_lazy_" + func.__name__ - - @property - def _lazy_property(instance): - if not hasattr(instance, attr_name): - setattr(instance, attr_name, func(instance)) - return getattr(instance, attr_name) - - return _lazy_property - - -class CheckPathAccess: - """ - check path access permissions - """ - - # pylint: disable=no-member - def __init__(self, func): - wraps(func)(self) - self.warned = permission_warned - - def __call__(self, *args, **kwargs): - path = args[0] - if not os.access(path, os.R_OK) and path not in self.warned: - logger.warning("%s can not read, check the permissions", path) - self.warned.add(path) - return self.__wrapped__(*args, **kwargs) - - def __get__(self, instance, cls): - if instance is None: - return self - return types.MethodType(self, instance) - - -def walk_error_handler(error): - """ - handle dir walk error - """ - if error.filename not in permission_warned: - logger.warning(error) - permission_warned.add(error.filename) - - -@CheckPathAccess -def get_file_path_from_directory(path: str, check_func: Any) -> list: - """ - get file from directory - """ - file_list = [] - for root, _, files in os.walk(path, onerror=walk_error_handler): - for filename in files: - filepath = os.path.join(root, filename) - if check_func(filename): - file_list.append(filepath) - return file_list - - -@singleton -class Timer: - def __init__(self): - self.strftime = time.strftime("%Y%m%d%H%M%S", time.localtime(time.time())) - - -def get_analyze_processes(): - # n_processes not exposed to user through ma-advisor command arguments now - return min(int(os.getenv(const.MA_ADVISOR_ANALYZE_PROCESSES, 1)), const.MA_ADVISOR_MAX_PROCESSES) - - -def init_timeline_ops_db(cann_version=None, torch_version=None): - logger.debug("init operators database") - - return FusionOperatorDB(cann_version=cann_version, torch_version=torch_version) - - -def format_timeline_result(result: dict, dump_html=False): - """ - :Param result: json for api name and stack - :Return: json after format - """ - format_result = {} - if dump_html: - result = json.loads(json.dumps(result).replace("\\r\\n", "
").replace("", "<module>")) - - for key, stacks in result.items(): - api_name = key.split(":")[0] - format_result[api_name] = sorted(list(stacks.items()), key=lambda stack: stack[1], reverse=True) - return format_result - - -class ParallelJob: - - def __init__(self, src_func, ops_api_list, job_name=None): - if not callable(src_func): - raise TypeError(f"src_func should be callable") - - if not isinstance(ops_api_list, (list, tuple)): - raise TypeError(f"ops_api_list should be list or tuple") - - self.src_func = src_func - self.ops_api_list = ops_api_list - self.job_name = job_name - - def start(self, n_proccesses): - - queue = mp.Queue(len(self.ops_api_list)) - completed_queue = mp.Queue() - for i in range(len(self.ops_api_list)): - queue.put(i) - - processes = [] - listen = mp.Process(target=self.listener, args=(completed_queue, len(self.ops_api_list),)) - listen.start() - - for i in range(n_proccesses): - p = mp.Process(target=self.parallel_queue, args=(queue, completed_queue,)) - processes.append(p) - p.start() - - for p in processes: - p.join() - - completed_queue.put(None) - listen.join() - - def listener(self, completed_queue, num): - pbar = tqdm(total=num, position=0, leave=False, ncols=100, desc=self.job_name) - for _ in iter(completed_queue.get, None): - pbar.update() - pbar.refresh() - pbar.n = num - - def parallel_queue(self, job_queue, completed_queue): - while True: - try: - if job_queue.empty(): - break - token = job_queue.get(timeout=1) - except queue.Empty: - continue - self.src_func(*self.ops_api_list[token]) - completed_queue.put(token) - - -def mp_queue_to_list(job_queue): - queue_list = [] - while True: - try: - if job_queue.empty(): - break - token = job_queue.get(timeout=1) - queue_list.append(token) - except queue.Empty: - continue - return queue_list - - -def load_parameter(parameter, default): - if not os.environ.get(parameter, None): - return default - else: - return os.environ.get(parameter) - - -def get_supported_subclass(clazz: VersionControl.__class__, cann_version: str): - """ - Returns a list of subclasses that support the specified version - :param clazz: Class name which is extends to VersionControl.__class__ - :param cann_version: The CANN software version - :return: The list of subclasses that support the specified CANN version - """ - # 获取所有支持这个cann版本的子类 - dataset_classes = clazz.__subclasses__() - sub_class_list = [cls for cls in dataset_classes if cls.is_supported(cann_version)] - logger.debug("The support subclass list is %s, cann version is %s", str(sub_class_list), cann_version) - return sub_class_list - - -def to_percent(num: float) -> str: - """ - change float to percent format - """ - num = num * 100 - return f"{num:.2f}%" - - -def safe_division(numerator, denominator): - """Return 0 if denominator is 0.""" - return denominator and numerator / denominator - - -def safe_write(content, save_path): - if os.path.dirname(save_path) != "": - os.makedirs(os.path.dirname(save_path), exist_ok=True) - - with os.fdopen(os.open(save_path, os.O_WRONLY | os.O_CREAT | os.O_TRUNC, - stat.S_IRUSR | stat.S_IWUSR | stat.S_IRGRP), "w") as f: - f.write(content) - - -def create_directory_for_file(file: str) -> None: - """ - create directory for file - """ - dirname = os.path.dirname(file) - if not os.path.exists(dirname): - os.makedirs(dirname) - - -class CheckPathAccess: - """ - check path access permissions - """ - - # pylint: disable=no-member - def __init__(self, func): - wraps(func)(self) - self.warned = permission_warned - - def __call__(self, *args, **kwargs): 
- path = args[0] - if path and not os.access(path, os.R_OK) and path not in self.warned: - logger.warning("%s can not read, check the permissions", path) - self.warned.add(path) - return self.__wrapped__(*args, **kwargs) - - def __get__(self, instance, cls): - if instance is None: - return self - return types.MethodType(self, instance) - - -@CheckPathAccess -def get_file_path_from_directory(path, check_func): - """ - get file from directory - """ - file_list = [] - - if not path: - return file_list - - if not os.path.isdir(path): - logger.warning("Expected existed directory, but got %s", path) - - for root, _, files in os.walk(path): - for filename in files: - filepath = os.path.join(root, filename) - if check_func(filename): - file_list.append(filepath) - return file_list - - -@CheckPathAccess -def get_dir_path_from_directory(path: str, check_func: Any) -> list: - """ - get file from directory - """ - file_list = [] - for root, _, files in os.walk(path, onerror=walk_error_handler): - for filename in files: - filepath = os.path.join(root, filename) - if check_func(filename): - file_list.append(filepath) - return file_list - - -def is_regex_pattern(string: str): - """ - Check if str is a regular expression. - """ - escaped_string = re.escape(string) - return not (escaped_string == string) - - -def join_prof_path(root_dir: str, sub_dir: str) -> str: - """ - regular expression matching method for path concatenation - """ - if is_regex_pattern(sub_dir): - for root, _, _ in os.walk(root_dir, onerror=walk_error_handler): - if re.match(sub_dir, os.path.basename(root)): - return root - else: - sub_dir = os.path.join(root_dir, sub_dir) - if os.path.exists(sub_dir): - return sub_dir - return "" - - -def format_excel_title(title: str) -> str: - """ - format excel title - """ - title = title.lower() - title = title.replace("(us)", '') - title = title.replace("(ns)", '') - title = title.replace("(%)", '') - title = title.replace(" ", "_") - return title - - -def format_float(num: float) -> float: - """ - format float num, round to 2 decimal places - """ - return round(num, 2) - - -class SafeOpen: - """ - safe open to check file - """ - - # pylint: disable=consider-using-with - def __init__(self, name, mode='r', encoding=None): - self.file = None - if not os.path.exists(name): - logger.warning("%s not exist, please check", name) - return - - if os.access(name, os.R_OK): - self.file = open(name, mode, encoding=encoding, errors="ignore") - else: - logger.warning("%s can not read, check the permissions", name) - - def __enter__(self): - return self.file - - def __exit__(self, exc_type, exc_val, exc_tb): - if self.file: - self.file.close() - return True - - -def save_downloaded_file(response, url_path, file_save_path): - """保存响应体中的文件 - - 参数: - response: 请求后获取的响应体 - url_path: url路径 - file_save_path: 保存路径 - 返回: - final_file_path: 文件保存绝对路径 - """ - # 获取url路径中的文件名, 拼接在保存路径下 - file_save_path = os.path.normpath(file_save_path) - file_name = os.path.basename(url_path) - final_file_path = os.path.join(file_save_path, file_name) - # 若目标保存路径不存在,则自动生成 - if not os.path.exists(file_save_path): - os.makedirs(file_save_path) - if response.status_code <= 300: - logger.debug("Response status code is %s", response.status_code) - flags = os.O_WRONLY | os.O_CREAT | os.O_EXCL - modes = stat.S_IWUSR | stat.S_IRUSR - # 若文件已存在,则移除已有的文件并保存最新的文件 - if os.path.exists(final_file_path): - os.remove(final_file_path) - # 保存文件 - with os.fdopen(os.open(final_file_path, flags, modes), mode="wb") as f: - f.write(response.content) - 
logger.info("Success to save content in: %s", os.path.abspath(final_file_path)) - else: - # 若响应码不为预期的数值, 显示相应告警 - logger.warning("Failed to save the response body. The response status code is %s. " - "Please check the network or file URL", response.status_code) - - -def request_with_retry(url_path): - """使用requests请求获取文件, 失败则进行重试, 最多请求 max_retries+1 次 - - 参数: - url_path: URL路径 - file_save_path: 云文件保存路径 - """ - logger.debug("Requesting or retrying to get %s", url_path) - - # 若从环境变量指定了保存路径,优先从环境变量中获取,若为空则使用默认的云文件保存路径constant.CLOUD_RULE_PATH - file_save_path = os.path.join(os.path.expanduser("~"), const.CLOUD_RULE_PATH) - if os.getenv(const.ADVISOR_RULE_PATH): - file_save_path = os.getenv(const.ADVISOR_RULE_PATH) - - session = requests.Session() - # 使用session发起的所有请求, 默认最多会重试 max_retries 次, 计入最初请求, 最差情况下请求 max_retries+1 次 - adapter = HTTPAdapter(max_retries=const.MAX_RETRIES) - session.mount('http://', adapter) - session.mount('https://', adapter) - - logger.debug('Session try to get response') - response = None - try: - response = session.get(url_path, timeout=const.TIMEOUT) - except Exception as e: - logger.debug("Error: %s: %s", e, traceback.format_exc()) - - if response is None: - logger.warning("Fail to download: %s, response is None, " - "please use the environment variable %s for more detailed information", - url_path, const.ADVISOR_LOG_LEVEL) - else: - try: - # 若响应码为400~600之间,response.raise_for_status抛出HTTPError错误, 跳过调用save_downloaded_file函数逻辑 - response.raise_for_status() - save_downloaded_file(response, url_path=url_path, file_save_path=file_save_path) - except Exception as e: - logger.warning("Error: %s: %s", e, traceback.format_exc()) - # 关闭 session, 清除所有装配器 - session.close() - - -def read_csv(file): - import csv - - raw_data = [] - logger.debug("Parse file %s", file) - with SafeOpen(file, encoding="utf-8") as csv_file: - try: - csv_content = csv.reader(csv_file) - for row in csv_content: - raw_data.append(row) - except OSError as error: - logger.error("Read csv file failed : %s", error) - return [] - - return raw_data +import inspect +import json +import logging +import multiprocessing as mp +import os +import queue +import re +import stat +import time +import traceback +import types +from functools import wraps +from typing import Any, Set + +import click +import requests +from requests.adapters import HTTPAdapter +from tqdm import tqdm + +from profiler.advisor.common import constant as const +from profiler.advisor.common.version_control import VersionControl +from profiler.advisor.utils.log import init_logger, get_log_level + +logger = logging.getLogger() +logger.setLevel(get_log_level()) +permission_warned: Set = set() + + +def ignore_warning(exception: Exception = None): + return exception + + +class ContextObject(object): + def __init__(self): + self._debug = False + + def set_debug(self, debug=False): + self._debug = debug + + @property + def debug_mode(self): + return self._debug + + +def debug_option(f): + return click.option('--debug', '-D', + is_flag=True, + expose_value=False, + is_eager=True, + callback=init_logger, + help="Debug Mode. 
Shows full stack trace when error occurs.")(f) + + +def get_class_absolute_path(cls): + module = inspect.getmodule(cls) + if module is not None: + module_path = module.__name__ + class_name = cls.__name__ + return f"{module_path}.{class_name}" + else: + return None + + +def is_static_func(function_obj): + return isinstance(function_obj, staticmethod) + + +def singleton(cls): + """ + :param cls: any class + :return: singleton handle + + When using the singleton function, you need to manually specify arg='dataSet_path'. Otherwise, the singleton function + is initialized by class name. + if cls has 'arg' property, _instance map will build by class_name and 'arg', the default value of + collection path is class absolute path. + + _instance = {cls.name: {collection_path: instance}} + """ + _instance = {} + + def _singleton(*args: any, **kw: any) -> any: + collection_path = kw.get("collection_path") + if not collection_path: + collection_path = get_class_absolute_path(cls) + if cls in _instance and collection_path in _instance[cls]: + return _instance[cls].get(collection_path) + if cls not in _instance: + _instance[cls] = {collection_path: cls(*args, **kw)} + else: + _instance[cls][collection_path] = cls(*args, **kw) + return _instance[cls].get(collection_path) + + # 保留原始类的属性和方法 + _singleton.__name__ = cls.__name__ + _singleton.__module__ = cls.__module__ + _singleton.__doc__ = cls.__doc__ + + # 拷贝原始类的类方法和静态方法 + _singleton.__dict__.update(cls.__dict__) + for base_class in inspect.getmro(cls)[::-1]: + # 获取类的所有成员 + members = inspect.getmembers(base_class) + + # 过滤出函数对象 + function_objs = [member[1] for member in members if inspect.isfunction(member[1]) or inspect.ismethod(member[1])] + for function_obj in function_objs: + if inspect.isfunction(function_obj) and not is_static_func(function_obj): + continue + setattr(_singleton, function_obj.__name__, function_obj) + + return _singleton + + +def lazy_property(func): + """ + Lazy loading of class attributes. + which is calculated only once when it is called for the first time, + and will not be repeated for each call after that. 
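A small sketch of the behaviour the reworked @singleton above introduces: instances are cached per class and per collection_path keyword. The class below is hypothetical.

    @singleton
    class DemoCache:
        """Hypothetical class, used only to illustrate per-path instances."""

        def __init__(self, collection_path=None, **kwargs):
            self.collection_path = collection_path

    a = DemoCache(collection_path="/data/job_a")
    b = DemoCache(collection_path="/data/job_a")
    c = DemoCache(collection_path="/data/job_b")
    assert a is b        # same collection_path -> same cached instance
    assert a is not c    # different collection_path -> separate instance
    # Without a collection_path keyword the class's absolute module path is used as
    # the cache key, so it degrades to an ordinary per-class singleton.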
+ """ + attr_name = "_lazy_" + func.__name__ + + @property + def _lazy_property(instance): + if not hasattr(instance, attr_name): + setattr(instance, attr_name, func(instance)) + return getattr(instance, attr_name) + + return _lazy_property + + +class CheckPathAccess: + """ + check path access permissions + """ + + # pylint: disable=no-member + def __init__(self, func): + wraps(func)(self) + self.warned = permission_warned + + def __call__(self, *args, **kwargs): + path = args[0] + if not os.access(path, os.R_OK) and path not in self.warned: + logger.warning("%s can not read, check the permissions", path) + self.warned.add(path) + return self.__wrapped__(*args, **kwargs) + + def __get__(self, instance, cls): + if instance is None: + return self + return types.MethodType(self, instance) + + +def walk_error_handler(error): + """ + handle dir walk error + """ + if error.filename not in permission_warned: + logger.warning(error) + permission_warned.add(error.filename) + + +@CheckPathAccess +def get_file_path_from_directory(path: str, check_func: Any) -> list: + """ + get file from directory + """ + file_list = [] + for root, _, files in os.walk(path, onerror=walk_error_handler): + for filename in files: + filepath = os.path.join(root, filename) + if check_func(filename): + file_list.append(filepath) + return file_list + + +@singleton +class Timer: + def __init__(self): + self.strftime = time.strftime("%Y%m%d%H%M%S", time.localtime(time.time())) + + +def get_analyze_processes(): + # n_processes not exposed to user through att-advisor command arguments now + return min(int(os.getenv(const.MA_ADVISOR_ANALYZE_PROCESSES, 1)), const.MA_ADVISOR_MAX_PROCESSES) + + +def format_timeline_result(result: dict, dump_html=False): + """ + :Param result: json for api name and stack + :Return: json after format + """ + format_result = {} + if dump_html: + result = json.loads(json.dumps(result).replace("\\r\\n", "
").replace("", "<module>")) + + for key, stacks in result.items(): + api_name = key.split(":")[0] + format_result[api_name] = sorted(list(stacks.items()), key=lambda stack: stack[1], reverse=True) + return format_result + + +class ParallelJob: + + def __init__(self, src_func, ops_api_list, job_name=None): + if not callable(src_func): + raise TypeError(f"src_func should be callable") + + if not isinstance(ops_api_list, (list, tuple)): + raise TypeError(f"ops_api_list should be list or tuple") + + self.src_func = src_func + self.ops_api_list = ops_api_list + self.job_name = job_name + + def start(self, n_proccesses): + + job_queue = mp.Queue(len(self.ops_api_list)) + completed_queue = mp.Queue() + for i in range(len(self.ops_api_list)): + job_queue.put(i) + + processes = [] + listen = mp.Process(target=self.listener, args=(completed_queue, len(self.ops_api_list),)) + listen.start() + + for i in range(n_proccesses): + p = mp.Process(target=self.parallel_queue, args=(job_queue, completed_queue,)) + processes.append(p) + p.start() + + for p in processes: + p.join() + + completed_queue.put(None) + listen.join() + + def listener(self, completed_queue, num): + pbar = tqdm(total=num, position=0, leave=False, ncols=100, desc=self.job_name) + for _ in iter(completed_queue.get, None): + pbar.update() + pbar.refresh() + pbar.n = num + + def parallel_queue(self, job_queue, completed_queue): + while True: + try: + if job_queue.empty(): + break + token = job_queue.get(timeout=1) + except queue.Empty: + continue + self.src_func(*self.ops_api_list[token]) + completed_queue.put(token) + + +def mp_queue_to_list(job_queue): + queue_list = [] + while True: + try: + if job_queue.empty(): + break + token = job_queue.get(timeout=1) + queue_list.append(token) + except queue.Empty: + continue + return queue_list + + +def load_parameter(parameter, default): + if not os.environ.get(parameter, None): + return default + else: + return os.environ.get(parameter) + + +def get_supported_subclass(clazz: VersionControl.__class__, cann_version: str): + """ + Returns a list of subclasses that support the specified version + :param clazz: Class name which is extends to VersionControl.__class__ + :param cann_version: The CANN software version + :return: The list of subclasses that support the specified CANN version + """ + # 获取所有支持这个cann版本的子类 + dataset_classes = clazz.__subclasses__() + sub_class_list = [cls for cls in dataset_classes if cls.is_supported(cann_version)] + logger.debug("The support subclass list is %s, cann version is %s", str(sub_class_list), cann_version) + return sub_class_list + + +def to_percent(num: float) -> str: + """ + change float to percent format + """ + num = num * 100 + return f"{num:.2f}%" + + +def safe_division(numerator, denominator): + """Return 0 if denominator is 0.""" + return denominator and numerator / denominator + + +def safe_write(content, save_path): + if os.path.dirname(save_path) != "": + os.makedirs(os.path.dirname(save_path), exist_ok=True) + + with os.fdopen(os.open(save_path, os.O_WRONLY | os.O_CREAT | os.O_TRUNC, + stat.S_IRUSR | stat.S_IWUSR | stat.S_IRGRP), "w") as f: + f.write(content) + + +def create_directory_for_file(file: str) -> None: + """ + create directory for file + """ + dirname = os.path.dirname(file) + if not os.path.exists(dirname): + os.makedirs(dirname) + + +class CheckPathAccess: + """ + check path access permissions + """ + + # pylint: disable=no-member + def __init__(self, func): + wraps(func)(self) + self.warned = permission_warned + + def __call__(self, 
*args, **kwargs): + path = args[0] + if path and not os.access(path, os.R_OK) and path not in self.warned: + logger.warning("%s can not read, check the permissions", path) + self.warned.add(path) + return self.__wrapped__(*args, **kwargs) + + def __get__(self, instance, cls): + if instance is None: + return self + return types.MethodType(self, instance) + + +@CheckPathAccess +def get_file_path_from_directory(path, check_func): + """ + get file from directory + """ + file_list = [] + + if not path: + return file_list + + if not os.path.isdir(path): + logger.warning("Expected existed directory, but got %s", path) + + for root, _, files in os.walk(path): + for filename in files: + filepath = os.path.join(root, filename) + if check_func(filename): + file_list.append(filepath) + return file_list + + +@CheckPathAccess +def get_dir_path_from_directory(path: str, check_func: Any) -> list: + """ + get file from directory + """ + file_list = [] + for root, _, files in os.walk(path, onerror=walk_error_handler): + for filename in files: + filepath = os.path.join(root, filename) + if check_func(filename): + file_list.append(filepath) + return file_list + + +def is_regex_pattern(string: str): + """ + Check if str is a regular expression. + """ + escaped_string = re.escape(string) + return not (escaped_string == string) + + +def join_prof_path(root_dir: str, sub_dir: str) -> str: + """ + regular expression matching method for path concatenation + """ + if is_regex_pattern(sub_dir): + for root, _, _ in os.walk(root_dir, onerror=walk_error_handler): + if re.match(sub_dir, os.path.basename(root)): + return root + else: + sub_dir = os.path.join(root_dir, sub_dir) + if os.path.exists(sub_dir): + return sub_dir + return "" + + +def format_excel_title(title: str) -> str: + """ + format excel title + """ + title = title.lower() + title = title.replace("(us)", '') + title = title.replace("(ns)", '') + title = title.replace("(%)", '') + title = title.replace(" ", "_") + return title + + +def format_float(num: float) -> float: + """ + format float num, round to 2 decimal places + """ + return round(num, 2) + + +class SafeOpen: + """ + safe open to check file + """ + + # pylint: disable=consider-using-with + def __init__(self, name, mode='r', encoding=None): + self.file = None + if not os.path.exists(name): + logger.warning("%s not exist, please check", name) + return + + if os.access(name, os.R_OK): + self.file = open(name, mode, encoding=encoding, errors="ignore") + else: + logger.warning("%s can not read, check the permissions", name) + + def __enter__(self): + return self.file + + def __exit__(self, exc_type, exc_val, exc_tb): + if self.file: + self.file.close() + return True + + +def save_downloaded_file(response, url_path, file_save_path): + """保存响应体中的文件 + + 参数: + response: 请求后获取的响应体 + url_path: url路径 + file_save_path: 保存路径 + 返回: + final_file_path: 文件保存绝对路径 + """ + # 获取url路径中的文件名, 拼接在保存路径下 + file_save_path = os.path.normpath(file_save_path) + file_name = os.path.basename(url_path) + final_file_path = os.path.join(file_save_path, file_name) + # 若目标保存路径不存在,则自动生成 + if not os.path.exists(file_save_path): + os.makedirs(file_save_path) + if response.status_code <= 300: + logger.debug("Response status code is %s", response.status_code) + flags = os.O_WRONLY | os.O_CREAT | os.O_EXCL + modes = stat.S_IWUSR | stat.S_IRUSR + # 若文件已存在,则移除已有的文件并保存最新的文件 + if os.path.exists(final_file_path): + os.remove(final_file_path) + # 保存文件 + with os.fdopen(os.open(final_file_path, flags, modes), mode="wb") as f: + 
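A minimal usage sketch for the ParallelJob helper above; the worker function and argument list are placeholders (fork-based multiprocessing is assumed).

    def _demo_worker(op_name, api_name):
        # Hypothetical per-item work; each tuple in ops_api_list is unpacked into these arguments.
        print(op_name, api_name)

    jobs = ParallelJob(_demo_worker,
                       [("add-mul", "npu_xxx"), ("gelu", "npu_yyy")],   # placeholder pairs
                       job_name="demo scan")
    jobs.start(2)   # two worker processes plus one tqdm listener process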
f.write(response.content) + logger.info("Success to save content in: %s", os.path.abspath(final_file_path)) + else: + # 若响应码不为预期的数值, 显示相应告警 + logger.warning("Failed to save the response body. The response status code is %s. " + "Please check the network or try another region", response.status_code) + + +def request_with_retry(url_path, region_name=None): + """使用requests请求获取文件, 失败则进行重试, 最多请求 max_retries+1 次 + + 参数: + url_path: URL路径 + file_save_path: 云文件保存路径 + """ + logger.debug("Requesting or retrying to get file from region: %s", region_name) + + # 若从环境变量指定了保存路径,优先从环境变量中获取,若为空则使用默认的云文件保存路径constant.CLOUD_RULE_PATH + file_save_path = os.path.join(os.path.expanduser("~"), const.CLOUD_RULE_PATH) + if os.getenv(const.ADVISOR_RULE_PATH): + file_save_path = os.getenv(const.ADVISOR_RULE_PATH) + + session = requests.Session() + # 使用session发起的所有请求, 默认最多会重试 max_retries 次, 计入最初请求, 最差情况下请求 max_retries+1 次 + adapter = HTTPAdapter(max_retries=const.MAX_RETRIES) + session.mount(const.HTTP_PREFIXES, adapter) + session.mount(const.HTTPS_PREFIXES, adapter) + + logger.debug('Session try to get response') + response = None + try: + response = session.get(url_path, timeout=const.TIMEOUT) + except Exception as e: + logger.debug("Error: %s: %s", e, traceback.format_exc()) + + if response is None: + logger.warning("Fail to download file from region: %s, response is None, " + "please use the environment variable %s for more detailed information", + region_name, const.ADVISOR_LOG_LEVEL) + else: + try: + # 若响应码为400~600之间,response.raise_for_status抛出HTTPError错误, 跳过调用save_downloaded_file函数逻辑 + response.raise_for_status() + save_downloaded_file(response, url_path=url_path, file_save_path=file_save_path) + except Exception as e: + logger.warning("Error: %s: %s", e, traceback.format_exc()) + # 关闭 session, 清除所有装配器 + session.close() + + +def read_csv(file): + import csv + + raw_data = [] + logger.debug("Parse file %s", file) + with SafeOpen(file, encoding="utf-8") as csv_file: + try: + csv_content = csv.reader(csv_file) + for row in csv_content: + raw_data.append(row) + except OSError as error: + logger.error("Read csv file failed : %s", error) + return [] + + return raw_data + + +def get_file_path_by_walk(root, filename): + file_path = "" + for root, _, files in os.walk(root, topdown=True): + for name in files: + if name == filename: + file_path = os.path.join(root, name) + return file_path + return file_path diff --git a/profiler/cli/analyze_cli.py b/profiler/cli/analyze_cli.py index 2efecffcb7..95d8fa2f53 100644 --- a/profiler/cli/analyze_cli.py +++ b/profiler/cli/analyze_cli.py @@ -19,8 +19,8 @@ def _analyze(dimensions, **kwargs): job_list = [] for dimension in dimensions: - interface = Interface(**kwargs) - job_list.append((dimension, interface)) + interface = Interface(**kwargs) + job_list.append((dimension, interface)) for i, (dimension, interface) in enumerate(job_list[::-1]): result_list += interface.get_result(dimension, render_html=i == len(job_list) - 1) @@ -41,8 +41,9 @@ def analyze_cli(**kwargs): name="all", short_help='Analyze timeline, operators and graph.') @click.option('--profiling_path', '-d', 'profiling_path', type=click.Path(), required=True, - help='path of trace_view.json in profiling') -@click.option('--benchmark_profiling_path', '-bp', 'benchmark_profiling_path', type=click.Path()) + help='Directory of profiling data') +@click.option('--benchmark_profiling_path', '-bp', 'benchmark_profiling_path', type=click.Path(), + help='Directory of benchmark profiling data, used for compare performance') 
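The retry behaviour request_with_retry relies on, shown in isolation; the URL and retry count below are placeholders.

    import requests
    from requests.adapters import HTTPAdapter

    session = requests.Session()
    session.mount("https://", HTTPAdapter(max_retries=3))   # 1 initial attempt + up to 3 retries
    try:
        response = session.get("https://example.com/rules/demo.yaml", timeout=5)   # placeholder URL
        response.raise_for_status()   # a 4xx/5xx status raises HTTPError, skipping the save step
    finally:
        session.close()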
@click.option('--cann_version', '-cv', 'cann_version', type=click.Choice(constant.SUPPORTED_CANN_VERSION, case_sensitive=False), default=constant.DEFAULT_CANN_VERSION, @@ -52,7 +53,14 @@ def analyze_cli(**kwargs): type=click.Choice(constant.SUPPORTED_TORCH_VERSION, case_sensitive=False), default=constant.DEFAULT_TORCH_VERSION, help='The runtime torch version, which can be detected by exec command "pip show torch"') -@click.option('--is_inference', is_flag=True) +# @click.option('--is_inference', is_flag=True, help="Enable performance analysis of inference task") +@click.option("-pt", + "--profiling_type", + metavar="", + default=constant.ASCEND_PYTORCH_PROFILER, + required=False, + type=click.Choice(constant.SUPPORTED_PROFILING_TYPE), + help="enter the profiling type, selectable range ascend_pytorch_profiler, mslite ,msprof") @debug_option def analyze_all(**kwargs) -> None: # 当前compare_tools必须输入两个profiling路径,att-advisor有等价功能支持输入一个Profiling路径,后续替换成对应实现 @@ -66,8 +74,7 @@ def analyze_all(**kwargs) -> None: name="communication", short_help='Analyze timeline, operators and graph.') @click.option('--profiling_path', '-d', 'profiling_path', type=click.Path(), required=True, - help='path of trace_view.json in profiling') -@click.option('--benchmark_profiling_path', '-bp', 'benchmark_profiling_path', type=click.Path()) + help='Directory of profiling data') @click.option('--cann_version', '-cv', 'cann_version', type=click.Choice(constant.SUPPORTED_CANN_VERSION, case_sensitive=False), default=constant.DEFAULT_CANN_VERSION, @@ -77,19 +84,16 @@ def analyze_all(**kwargs) -> None: type=click.Choice(constant.SUPPORTED_TORCH_VERSION, case_sensitive=False), default=constant.DEFAULT_TORCH_VERSION, help='The runtime torch version, which can be detected by exec command "pip show torch"') -@click.option('--mode', '-m', 'mode', default=None) -@click.option('--is_inference', is_flag=True) @debug_option def analyze_communication(**kwargs) -> None: _analyze(["communication"], **kwargs) @analyze_cli.command(context_settings=CONTEXT_SETTINGS, - name="scheduling", + name="schedule", short_help='Analyze timeline, operators and graph.') @click.option('--profiling_path', '-d', 'profiling_path', type=click.Path(), required=True, - help='path of trace_view.json in profiling') -@click.option('--benchmark_profiling_path', '-bp', 'benchmark_profiling_path', type=click.Path()) + help='Directory of profiling data') @click.option('--cann_version', '-cv', 'cann_version', type=click.Choice(constant.SUPPORTED_CANN_VERSION, case_sensitive=False), default=constant.DEFAULT_CANN_VERSION, @@ -99,19 +103,16 @@ def analyze_communication(**kwargs) -> None: type=click.Choice(constant.SUPPORTED_TORCH_VERSION, case_sensitive=False), default=constant.DEFAULT_TORCH_VERSION, help='The runtime torch version, which can be detected by exec command "pip show torch"') -@click.option('--mode', '-m', 'mode', default=None) -@click.option('--is_inference', is_flag=True) @debug_option -def analyze_scheduling(**kwargs) -> None: - _analyze(["scheduling"], **kwargs) +def analyze_schedule(**kwargs) -> None: + _analyze(["schedule"], **kwargs) @analyze_cli.command(context_settings=CONTEXT_SETTINGS, - name="computing", + name="computation", short_help='Analyze timeline, operators and graph.') @click.option('--profiling_path', '-d', 'profiling_path', type=click.Path(), required=True, - help='path of trace_view.json in profiling') -@click.option('--benchmark_profiling_path', '-bp', 'benchmark_profiling_path', type=click.Path()) + help='Directory of profiling 
data') @click.option('--cann_version', '-cv', 'cann_version', type=click.Choice(constant.SUPPORTED_CANN_VERSION, case_sensitive=False), default=constant.DEFAULT_CANN_VERSION, @@ -121,8 +122,33 @@ def analyze_scheduling(**kwargs) -> None: type=click.Choice(constant.SUPPORTED_TORCH_VERSION, case_sensitive=False), default=constant.DEFAULT_TORCH_VERSION, help='The runtime torch version, which can be detected by exec command "pip show torch"') -@click.option('--mode', '-m', 'mode', default=None) -@click.option('--is_inference', is_flag=True) +@click.option("-pt", + "--profiling_type", + metavar="", + default=constant.ASCEND_PYTORCH_PROFILER, + required=False, + type=click.Choice(constant.SUPPORTED_PROFILING_TYPE), + help="enter the profiling type, selectable range ascend_pytorch_profiler, mslite ,msprof") @debug_option -def analyze_computing(**kwargs) -> None: - _analyze(["computing"], **kwargs) +def analyze_computation(**kwargs) -> None: + _analyze(["computation"], **kwargs) + + +@analyze_cli.command(context_settings=CONTEXT_SETTINGS, + name="dataloader", + short_help='Analyze timeline, operators and graph.') +@click.option('--profiling_path', '-d', 'profiling_path', type=click.Path(), required=True, + help='Directory of profiling data') +@click.option('--cann_version', '-cv', 'cann_version', + type=click.Choice(constant.SUPPORTED_CANN_VERSION, case_sensitive=False), + default=constant.DEFAULT_CANN_VERSION, + help='The CANN software version, which can be viewed by executing the following command: ' + '"cat /usr/local/Ascend/ascend-toolkit/latest/aarch64-linux/ascend_toolkit_install.info"') +@click.option('--torch_version', '-tv', 'torch_version', + type=click.Choice(constant.SUPPORTED_TORCH_VERSION, case_sensitive=False), + default=constant.DEFAULT_TORCH_VERSION, + help='The runtime torch version, which can be detected by exec command "pip show torch"') +@click.option('--is_inference', is_flag=True, help="Enable performance analysis of inference task") +@debug_option +def analyze_dataloader(**kwargs) -> None: + _analyze(["dataloader"], **kwargs) diff --git a/profiler/cli/entrance.py b/profiler/cli/entrance.py index b14d3dfd86..d9b5b10da7 100644 --- a/profiler/cli/entrance.py +++ b/profiler/cli/entrance.py @@ -53,7 +53,7 @@ advisor_cli.add_command(compare_cli, name="compare") if __name__ == '__main__': advisor_cli.main( - ["analyze", "scheduling", "-d", + ["analyze", "schedule", "-d", r"/home/ma-user/work/profiling", ] ) diff --git a/profiler/cli/update_cli.py b/profiler/cli/update_cli.py new file mode 100644 index 0000000000..9407981ae0 --- /dev/null +++ b/profiler/cli/update_cli.py @@ -0,0 +1,40 @@ +from urllib import parse + +import click + +from profiler.advisor.common import constant +from profiler.advisor.config.config import Config +from profiler.advisor.utils.tools import CONTEXT_SETTINGS, ClickAliasedGroup +from profiler.advisor.utils.utils import debug_option, request_with_retry + + +@click.group(name="update", cls=ClickAliasedGroup) +def update_cli(**kwargs): + """Update operation command, such as update rule and specify save path.""" + pass + + +@update_cli.command(context_settings=CONTEXT_SETTINGS, + name="rule", + short_help='Update the ma-advisor rules on the terminal. The default save path is ' + '"~/rules/cloud/". 
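For completeness, a sketch of driving the renamed schedule sub-command in-process with click's test runner; the profiling path is a placeholder.

    from click.testing import CliRunner
    from profiler.cli.analyze_cli import analyze_cli

    runner = CliRunner()
    result = runner.invoke(analyze_cli, ["schedule", "-d", "/path/to/profiling"])   # hypothetical path
    print(result.exit_code, result.output)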
If user want to specify the save path, please use the environment ' + 'variable "ADVISOR_RULE_PATH"') +@click.option('--region', '-r', type=click.Choice(constant.CLOUD_RULE_REGION_LIST), required=True, + default=constant.DEFAULT_CLOUD_RULE_REGION, + help='Specifies the region where the rule file is downloaded.') +@debug_option +def update_rule(**kwargs) -> None: + """ + Download the latest rule yaml file. + """ + region_name = kwargs.get("region") + rule_bucket = Config().config.get(constant.RULE_BUCKET, region_name) + rule_endpoint_suffix = constant.COMMON_ENDPOINT_SUFFIX.format(region_name) + if region_name in constant.INNER_REGION_LIST: + rule_endpoint_suffix = constant.INNER_ENDPOINT_SUFFIX.format(region_name) + + obs_url = constant.HTTPS_PREFIXES + rule_bucket + "." + rule_endpoint_suffix + obs_url = parse.urljoin(obs_url, constant.COMMON_YAML_DIR) + for file_name in constant.CLOUD_YAML_NAME_LIST: + url = parse.urljoin(obs_url, file_name) + request_with_retry(url, region_name) diff --git a/requirements/test.txt b/requirements/tests.txt similarity index 95% rename from requirements/test.txt rename to requirements/tests.txt index 3bacb7ca55..bab89704aa 100644 --- a/requirements/test.txt +++ b/requirements/tests.txt @@ -1,5 +1,5 @@ -pytest==6.2.4 -pytest-cov==2.12.0 -pytest-mock==3.6.1 -pytest-cookies==0.6.1 +pytest==6.2.4 +pytest-cov==2.12.0 +pytest-mock==3.6.1 +pytest-cookies==0.6.1 mock==4.0.3 \ No newline at end of file diff --git a/version.txt b/version.txt index 7bcd0e3612..9f8e9b69a3 100644 --- a/version.txt +++ b/version.txt @@ -1 +1 @@ -0.0.2 \ No newline at end of file +1.0 \ No newline at end of file -- Gitee From 1761de9838e078f50ff993c34fcfb9c39f7d02db Mon Sep 17 00:00:00 2001 From: PersonalC Date: Wed, 8 May 2024 19:35:20 +0800 Subject: [PATCH 09/21] =?UTF-8?q?att=20advisor=E6=96=B0=E5=A2=9Edataset?= =?UTF-8?q?=E6=A1=86=E6=9E=B6?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- profiler/advisor/analyzer/base_analyzer.py | 73 +- .../analyzer/cluster/slow_link_analyser.py | 10 +- .../analyzer/cluster/slow_rank_analyser.py | 10 +- .../{computing => computation}/__init__.py | 0 .../aicpu/__init__.py | 0 .../bound/__init__.py | 0 .../op_compile/__init__.py | 0 .../analyzer/overall/overall_analyzer.py | 1 + .../{scheduling => schedule}/__init__.py | 0 .../free_event/__init__.py | 0 .../fusion_ops/__init__.py | 0 .../fusion_ops/fusion_ops_analyzer.py | 542 ++++----- profiler/advisor/common/constant.py | 12 +- .../advisor/common/timeline/fusion_ops_db.py | 340 +----- .../common/timeline/fusion_ops_rule.py | 110 ++ .../timeline/fusion_ops_rule_handler.py | 193 +++ profiler/advisor/config/config.ini | 8 +- .../dataset/cluster/cluster_dataset.py | 23 +- profiler/advisor/dataset/dataset.py | 38 + .../advisor/dataset/timeline_event_dataset.py | 26 +- profiler/advisor/interface/interface.py | 33 +- profiler/advisor/utils/utils.py | 1048 +++++++++-------- profiler/cli/analyze_cli.py | 72 +- profiler/cli/entrance.py | 2 +- profiler/cli/update_cli.py | 40 + requirements/{test.txt => tests.txt} | 8 +- version.txt | 2 +- 27 files changed, 1414 insertions(+), 1177 deletions(-) rename profiler/advisor/analyzer/{computing => computation}/__init__.py (100%) rename profiler/advisor/analyzer/{computing => computation}/aicpu/__init__.py (100%) rename profiler/advisor/analyzer/{computing => computation}/bound/__init__.py (100%) rename profiler/advisor/analyzer/{computing => computation}/op_compile/__init__.py (100%) rename 
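A sketch of the URL assembled by update_rule above. The endpoint suffix is inferred from the full URLs that used to live in config.ini and, like the file name, is an assumption.

    from urllib import parse

    region_name = "cn-north-9"
    rule_bucket = "cnnorth9-modelarts-sdk"                              # config.ini [RULE-BUCKET]
    endpoint_suffix = "obs.{}.myhuaweicloud.com".format(region_name)    # assumed COMMON_ENDPOINT_SUFFIX
    obs_url = "https://" + rule_bucket + "." + endpoint_suffix
    url = parse.urljoin(obs_url + "/",
                        "modelarts/solution/ma_advisor_rules/timeline_fusion_ops.yaml")  # assumed path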
profiler/advisor/analyzer/{scheduling => schedule}/__init__.py (100%) rename profiler/advisor/analyzer/{scheduling => schedule}/free_event/__init__.py (100%) rename profiler/advisor/analyzer/{scheduling => schedule}/fusion_ops/__init__.py (100%) rename profiler/advisor/analyzer/{scheduling => schedule}/fusion_ops/fusion_ops_analyzer.py (91%) create mode 100644 profiler/advisor/common/timeline/fusion_ops_rule.py create mode 100644 profiler/advisor/common/timeline/fusion_ops_rule_handler.py create mode 100644 profiler/advisor/dataset/dataset.py create mode 100644 profiler/cli/update_cli.py rename requirements/{test.txt => tests.txt} (95%) diff --git a/profiler/advisor/analyzer/base_analyzer.py b/profiler/advisor/analyzer/base_analyzer.py index ff945da5cf..6f9438ba75 100644 --- a/profiler/advisor/analyzer/base_analyzer.py +++ b/profiler/advisor/analyzer/base_analyzer.py @@ -1,17 +1,60 @@ +import logging +from functools import wraps +from typing import Dict, List, Union from abc import abstractmethod, ABCMeta + +from profiler.advisor.common import constant +from profiler.advisor.common.version_control import VersionControl +from profiler.advisor.dataset.dataset import Dataset +from profiler.advisor.result.result import OptimizeResult from profiler.advisor.display.html.render import HTMLRender -dataset_cls_list = [] +logger = logging.getLogger() + + +class BaseAnalyzer(VersionControl, metaclass=ABCMeta): + _SUPPORT_VERSIONS = constant.SUPPORTED_CANN_VERSION -class BaseAnalyzer(metaclass=ABCMeta): - def __init__(self, collection_path, dataset_cls_list, n_processes: int = 1, cann_version=None, torch_version=None, **kwargs): + dataset_cls_list = [] + + def __init__(self, collection_path, n_processes: int = 1, cann_version=constant.DEFAULT_CANN_VERSION, + torch_version=constant.DEFAULT_TORCH_VERSION, **kwargs): self.n_processes = n_processes self.cann_version = cann_version self.torch_version = torch_version self.html_render = HTMLRender() self.collection_path = collection_path self.kwargs = kwargs - self.event_dataset_list = self.get_dataset_dict(dataset_cls_list) + self.dataset_list: Dict[str, List[Dataset]] = {} + self.init_dataset_list() + self.result = OptimizeResult() + self.record_list: Dict[str, List] = {} + + @classmethod + def check_data(cls, data_list: tuple): + """ + check if all data in data list is contained + :param data_list: data list to check + :return: func ptr if check success + """ + + def decorate(func): + + @wraps(func) + def wrapper(self): + data = self.dataset_list + if data is None: + return None + for data_key in data_list: + if data_key not in data: + return None + + logger.info("Enable analysis %s with %s", self.__class__.__name__, ",".join(data_list)) + return func(self, data) + + return wrapper + + return decorate @abstractmethod def optimize(self): @@ -25,10 +68,24 @@ class BaseAnalyzer(metaclass=ABCMeta): def make_render(self): pass - def get_dataset_dict(self, dataset_cls_list): - datasets = {key: [] for key in dataset_cls_list} + def init_dataset_list(self)->None: + dataset_cls_list = self.dataset_cls_list + if len(dataset_cls_list) == 0: + logger.warning(f"Analyser: %s don't rely on any dataset!", self.__class__.__name__) + return for dataset_cls in dataset_cls_list: if dataset_cls and callable(dataset_cls): - datasets[dataset_cls] = dataset_cls(self.collection_path, **self.kwargs) - return datasets + dataset_cls(collection_path=self.collection_path, data=self.dataset_list, **self.kwargs) + + @staticmethod + def get_first_data_by_key(data, key) -> Union[Dataset, 
None]: + """ + get the first member from data with key + :param data: input data + :param key: data key + :return: the first dataset in dataset list + """ + if key in data and len(data[key]) > 0: + return data[key][0] + return None diff --git a/profiler/advisor/analyzer/cluster/slow_link_analyser.py b/profiler/advisor/analyzer/cluster/slow_link_analyser.py index d4212ada2c..e9143ae1de 100644 --- a/profiler/advisor/analyzer/cluster/slow_link_analyser.py +++ b/profiler/advisor/analyzer/cluster/slow_link_analyser.py @@ -14,7 +14,9 @@ # limitations under the License. from collections import defaultdict +from typing import Dict, List from profiler.advisor.analyzer.base_analyzer import BaseAnalyzer +from profiler.advisor.common import constant from profiler.advisor.result.result import OptimizeResult from profiler.advisor.result.item import OptimizeItem, OptimizeRecord from profiler.advisor.dataset.cluster.cluster_dataset import ClusterCommunicationDataSet @@ -35,9 +37,11 @@ class SlowLinkAnalyzer(BaseAnalyzer): SLOW_LINK_ANALYSIS = "slow_link_analysis" dataset_cls_list = [ClusterCommunicationDataSet] - def __init__(self, collection_path, n_processes: int = 1, cann_version=None, torch_version=None, **kwargs): - super().__init__(collection_path, self.dataset_cls_list, n_processes, cann_version, torch_version, **kwargs) - self.communication_data_class = self.event_dataset_list[ClusterCommunicationDataSet] + def __init__(self, collection_path, n_processes: int = 1, cann_version=constant.DEFAULT_CANN_VERSION, + torch_version=constant.DEFAULT_TORCH_VERSION, **kwargs): + super().__init__(collection_path, n_processes, cann_version, torch_version, **kwargs) + key = ClusterCommunicationDataSet.get_key() + self.communication_data_class = self.get_first_data_by_key(self.dataset_list, key) self.rank_bw_dict = self.communication_data_class.get_data() self.result = OptimizeResult() self.bottelneck = '' diff --git a/profiler/advisor/analyzer/cluster/slow_rank_analyser.py b/profiler/advisor/analyzer/cluster/slow_rank_analyser.py index 35b4663d38..b49ef5ec8c 100644 --- a/profiler/advisor/analyzer/cluster/slow_rank_analyser.py +++ b/profiler/advisor/analyzer/cluster/slow_rank_analyser.py @@ -14,7 +14,9 @@ # limitations under the License. 
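A hedged sketch of how a concrete analyzer is expected to use the new check_data decorator and get_first_data_by_key helper above; the analyzer is hypothetical and its remaining abstract members are omitted.

    from profiler.advisor.analyzer.base_analyzer import BaseAnalyzer
    from profiler.advisor.dataset.timeline_event_dataset import TimelineEventDataset

    class DemoAnalyzer(BaseAnalyzer):
        dataset_cls_list = [TimelineEventDataset]   # instantiated into self.dataset_list at __init__

        @BaseAnalyzer.check_data((TimelineEventDataset.get_key(),))
        def optimize(self, data):
            # Only reached when every key listed above is present in self.dataset_list;
            # otherwise the wrapper returns None without running the body.
            event_dataset = self.get_first_data_by_key(data, TimelineEventDataset.get_key())
            ...
            return self.result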
from collections import defaultdict +from typing import Dict, List from profiler.advisor.analyzer.base_analyzer import BaseAnalyzer +from profiler.advisor.common import constant from profiler.advisor.result.result import OptimizeResult from profiler.advisor.result.item import OptimizeItem, OptimizeRecord from profiler.advisor.dataset.cluster.cluster_dataset import ClusterStepTraceTimeDataSet @@ -27,9 +29,11 @@ class SlowRankAnalyzer(BaseAnalyzer): BOTTLENECK_LIST = ['Computing', 'Communication', "Free"] dataset_cls_list = [ClusterStepTraceTimeDataSet] - def __init__(self, collection_path, n_processes: int = 1, cann_version=None, torch_version=None, **kwargs): - super().__init__(collection_path, self.dataset_cls_list, n_processes, cann_version, torch_version, **kwargs) - self.step_trace_class = self.event_dataset_list[ClusterStepTraceTimeDataSet] + def __init__(self, collection_path, n_processes: int = 1, cann_version=constant.DEFAULT_CANN_VERSION, + torch_version=constant.DEFAULT_TORCH_VERSION, **kwargs): + super().__init__(collection_path, n_processes, cann_version, torch_version, **kwargs) + key = ClusterStepTraceTimeDataSet.get_key() + self.step_trace_class = self.get_first_data_by_key(self.dataset_list, key) self.step_trace_dict = self.step_trace_class.get_data() self.result = OptimizeResult() self.bottelneck = '' diff --git a/profiler/advisor/analyzer/computing/__init__.py b/profiler/advisor/analyzer/computation/__init__.py similarity index 100% rename from profiler/advisor/analyzer/computing/__init__.py rename to profiler/advisor/analyzer/computation/__init__.py diff --git a/profiler/advisor/analyzer/computing/aicpu/__init__.py b/profiler/advisor/analyzer/computation/aicpu/__init__.py similarity index 100% rename from profiler/advisor/analyzer/computing/aicpu/__init__.py rename to profiler/advisor/analyzer/computation/aicpu/__init__.py diff --git a/profiler/advisor/analyzer/computing/bound/__init__.py b/profiler/advisor/analyzer/computation/bound/__init__.py similarity index 100% rename from profiler/advisor/analyzer/computing/bound/__init__.py rename to profiler/advisor/analyzer/computation/bound/__init__.py diff --git a/profiler/advisor/analyzer/computing/op_compile/__init__.py b/profiler/advisor/analyzer/computation/op_compile/__init__.py similarity index 100% rename from profiler/advisor/analyzer/computing/op_compile/__init__.py rename to profiler/advisor/analyzer/computation/op_compile/__init__.py diff --git a/profiler/advisor/analyzer/overall/overall_analyzer.py b/profiler/advisor/analyzer/overall/overall_analyzer.py index 93b227fb61..e31a5d4288 100644 --- a/profiler/advisor/analyzer/overall/overall_analyzer.py +++ b/profiler/advisor/analyzer/overall/overall_analyzer.py @@ -1,4 +1,5 @@ import logging +from typing import Dict, List from profiler.advisor.analyzer.base_analyzer import BaseAnalyzer from profiler.advisor.display.html.render import HTMLRender diff --git a/profiler/advisor/analyzer/scheduling/__init__.py b/profiler/advisor/analyzer/schedule/__init__.py similarity index 100% rename from profiler/advisor/analyzer/scheduling/__init__.py rename to profiler/advisor/analyzer/schedule/__init__.py diff --git a/profiler/advisor/analyzer/scheduling/free_event/__init__.py b/profiler/advisor/analyzer/schedule/free_event/__init__.py similarity index 100% rename from profiler/advisor/analyzer/scheduling/free_event/__init__.py rename to profiler/advisor/analyzer/schedule/free_event/__init__.py diff --git a/profiler/advisor/analyzer/scheduling/fusion_ops/__init__.py 
b/profiler/advisor/analyzer/schedule/fusion_ops/__init__.py similarity index 100% rename from profiler/advisor/analyzer/scheduling/fusion_ops/__init__.py rename to profiler/advisor/analyzer/schedule/fusion_ops/__init__.py diff --git a/profiler/advisor/analyzer/scheduling/fusion_ops/fusion_ops_analyzer.py b/profiler/advisor/analyzer/schedule/fusion_ops/fusion_ops_analyzer.py similarity index 91% rename from profiler/advisor/analyzer/scheduling/fusion_ops/fusion_ops_analyzer.py rename to profiler/advisor/analyzer/schedule/fusion_ops/fusion_ops_analyzer.py index ca10dcb1f5..4259db093b 100644 --- a/profiler/advisor/analyzer/scheduling/fusion_ops/fusion_ops_analyzer.py +++ b/profiler/advisor/analyzer/schedule/fusion_ops/fusion_ops_analyzer.py @@ -1,271 +1,271 @@ -import multiprocessing -import logging -import re - -from tqdm import tqdm - -from profiler.advisor.analyzer.base_analyzer import BaseAnalyzer -from profiler.advisor.common import constant as const -from profiler.advisor.common.timeline.event import TimelineEvent -from profiler.advisor.dataset.timeline_event_dataset import TimelineEventDataset -from profiler.advisor.result.result import OptimizeResult -from profiler.advisor.result.item import OptimizeItem, OptimizeRecord -from profiler.advisor.utils.utils import format_timeline_result -from profiler.advisor.utils.utils import init_timeline_ops_db - -logger = logging.getLogger() - - -class TimelineFusionOpsAnalyzer(BaseAnalyzer): - dataset_cls_list = [TimelineEventDataset] - - def __init__(self, collection_path, n_processes: int = 1, cann_version=None, torch_version=None, **kwargs): - super().__init__(collection_path, self.dataset_cls_list, n_processes, cann_version, torch_version, **kwargs) - self._matched_op_index = {} if self.n_processes <= 1 else multiprocessing.Manager().dict() - self.matched_op_stacks = {} - self.empty_stacks = True - self.result = OptimizeResult() - self.timeline_event_dataset = self.event_dataset_list[TimelineEventDataset] - - def optimize(self, **kwargs): - for mode in [const.ATEN.lower(), const.OPTIMIZER.lower()]: - - for op_combined, npu_apis in tqdm(getattr(init_timeline_ops_db(self.cann_version, self.torch_version), - f"_{mode}_op_api_map").items(), leave=False, ncols=100, - desc="Scanning timeline for affinity apis"): - for npu_api in npu_apis.split("/"): - self.find_fusion_ops(self.timeline_event_dataset, op_combined, npu_api, mode) - - self.query_stack(self.timeline_event_dataset) - - logger.info("Finish timeline analysis") - self.make_record() - self.make_render() - return self.result - - def find_fusion_ops(self, event_dataset: TimelineEventDataset, ops: str, npu_api: str, mode: str): - """ - :Param event_dataset: dataset of timeline event - :Param ops: operator combination with '-' as separator , e.g. 
permute-reshape - :Param npu_api: api of torch_npu, generally more efficient than torch api - :Param mode: aten or dequeue or optimizer - :Return: json of op_name and called times and detail stacks - """ - op_rule_pattern, enable_regex = self._format_rule_to_pattern(ops) - if not enable_regex: - self._match_ops(event_dataset, op_rule_pattern, npu_api, mode) - else: - try: - self._match_ops_with_regex(event_dataset, op_rule_pattern, npu_api, mode) - except Exception as e: - logger.warning("Failed to find fusion operators with regex %s, reason is %s", ops, e) - - def _match_ops(self, event_dataset: TimelineEventDataset, ops: str, npu_api: str, mode: str): - """ match operator based on fusion operators rule(without regex), - only strictly equals of op name list means matched - :Param event_dataset: dataset of timeline event - :Param ops: operator combination with '-' as separator , e.g. permute-reshape - :Param npu_api: api of torch_npu, generally more efficient than torch api - :Param mode: aten or dequeue or optimizer - """ - op_list = ops.split(const.OP_SEP) - - matched_op_index = set() - api_ops_matched = False - - for index, event in enumerate(getattr(event_dataset, mode)): - if self._replace_op_name_prefix(event.name, mode) != op_list[0]: - continue - tmp_dequeue_event_names = [self._replace_op_name_prefix(event.name, mode) for event in - getattr(event_dataset, mode)[index: index + len(op_list)]] - if tmp_dequeue_event_names != op_list: - continue - api_ops_matched = True - matched_op_index.add(event.dataset_index) - - if api_ops_matched: - self._matched_op_index[npu_api + f":{ops}"] = matched_op_index - - def _match_ops_with_regex(self, event_dataset: TimelineEventDataset, op_rule_pattern: str, npu_api: str, - mode: str): - """ match operator based on fusion operators rule(with regex), - using regex to support condition like 'a = torch.mul(xxx) if xxx else torch.add(xxx)' - :Param event_dataset: dataset of timeline event - :Param op_rule_pattern: fusion operators rule with regex definition , e.g. add-mul{0,10}, add-mul* - :Param npu_api: api of torch_npu, generally more efficient than torch api - :Param mode: aten or dequeue or optimizer - """ - matched_op_index = set() - total_op_name = "".join([f"{const.OP_SEP}{self._replace_op_name_prefix(event.name, mode)}{const.OP_SEP}" - for event in - getattr(event_dataset, mode)]) - - matched_pattern_index_tuple = [(x.start(0), x.end(0)) for x in re.finditer(op_rule_pattern, total_op_name)] - # convert list of index tuple to a whole list: [(3, 25), ...] -> [3, 25, ...] 
- total_ops_split_points = [num for sublist in matched_pattern_index_tuple for num in sublist] - - api_ops_matched = len(total_ops_split_points) != 0 - - op_index = [] - if 0 not in total_ops_split_points: - total_ops_split_points = [0] + total_ops_split_points - if len(list(total_op_name)) not in total_ops_split_points: - total_ops_split_points.append(len(list(total_op_name))) - - # convert total ops name like "-add-mul-xxx-div-" to small pieces like [["add", "mul"], [...], ["div"]] - # by the regex index and then calculate the real index for matched fusion operators in event dataset - for l, r in zip(total_ops_split_points, total_ops_split_points[1:]): - matched_op_flag = True if (l, r) in matched_pattern_index_tuple else False - matched_ops_list = total_op_name[l: r].strip(const.OP_SEP).split(const.OP_SEP + const.OP_SEP) - op_index.append([matched_op_flag, len(matched_ops_list)]) - for i, _ in enumerate(op_index): - if i > 0: - # calculate cumsum for indexing matched operator - op_index[i][1] = op_index[i][1] + op_index[i - 1][1] - op_index = [[False, 0]] + op_index - - for i, _ in enumerate(op_index): - if not op_index[i][0]: - continue - index = op_index[i - 1][1] - matched_op_index.add(index) - - if index > len(getattr(event_dataset, mode)) - 1: - continue - dataset_index = getattr(event_dataset, mode)[index].get("dataset_index") - matched_op_index.add(dataset_index) - - if api_ops_matched: - self._matched_op_index[npu_api + f":{op_rule_pattern}"] = sorted(list(matched_op_index)) - - def make_record(self): - """ - make record for what and how to optimize - """ - if not self.matched_op_stacks: - return - - desc = f"Found {len(format_timeline_result(self.matched_op_stacks))} apis to be replaced" \ - f" based on the runtime env cann-{self.cann_version} and torch-{self.torch_version}" - suggestion = "Please replace training api according to sub table 'Affinity training api'" - if self.empty_stacks: - desc += ", but with no stack" - suggestion = const.TIMELINE_EMPTY_STACKS_PROMPT.format( - timeline_profiling_doc_url=const.TIMELINE_WITH_STACK_DOC_URL - ) - - optimization_item = OptimizeItem( - const.AFFINITY_TRAINING_API, - desc, - [suggestion] - ) - - self.result.add(OptimizeRecord(optimization_item)) - - record_title = ["Affinity API", "Code stacks", "Stack called counts"] - self.result.add_detail(const.AFFINITY_TRAINING_API, headers=record_title) - - for api_name, stacks_info in format_timeline_result(self.matched_op_stacks).items(): - if not stacks_info: - detail = [api_name, "null", "null"] - self.result.add_detail(const.AFFINITY_TRAINING_API, detail=detail) - else: - for stack in stacks_info: - detail = [api_name, *stack] - self.result.add_detail(const.AFFINITY_TRAINING_API, detail=detail) - - def make_render(self): - format_result_for_html = format_timeline_result(dict(self.matched_op_stacks), dump_html=True) - - self.html_render.render_template(key="scheduling", - template_dir="templates", - template_name="affinity_api.html", - cann_version=self.cann_version, - torch_version=self.torch_version, - empty_stacks=self.empty_stacks, - with_stack_doc_url=const.TIMELINE_WITH_STACK_DOC_URL, - api_doc_url=const.TIMELINE_API_DOC_URL, - result=format_result_for_html) - - def query_stack(self, event_dataset: TimelineEventDataset): - if all([len(matched_index) == 0 for matched_index in self._matched_op_index.values()]): - return - - op_stack_list = event_dataset.parse_data_with_generator(self._query_stack_by_matched_index) - for op_stack in op_stack_list: - for op_rule, stack in 
op_stack.items(): - if op_rule not in self.matched_op_stacks: - self.matched_op_stacks[op_rule] = {} - if stack == const.TIMELINE_FUSION_OPS_NO_STACK_FLAG: - continue - if stack not in self.matched_op_stacks[op_rule]: - self.matched_op_stacks[op_rule][stack] = 0 - self.matched_op_stacks[op_rule][stack] += 1 - - def _query_stack_by_matched_index(self, index, event): - stack_record = {} - event = TimelineEvent(event) - - matched_op_rules = [] - for op_rule, matched_index in self._matched_op_index.items(): - if index not in matched_index: - continue - - matched_op_rules.append(op_rule) - stack = event.args.get(const.CALL_STACKS) - - if not stack: - logger.debug("Got empty '%s' for event %s", const.CALL_STACKS, event) - continue - - if self.empty_stacks and stack: - self.empty_stacks = False - - stack_record[op_rule] = stack - - if matched_op_rules and not stack_record: - for op_rule in matched_op_rules: - stack_record[op_rule] = const.TIMELINE_FUSION_OPS_NO_STACK_FLAG - - return stack_record - - def _replace_op_name_prefix(self, event_name, mode): - if mode == const.DEQUEUE.lower(): - op_name_prefix = f"{const.DEQUEUE}{const.DEQUEUE_SEP}" - elif mode == const.ATEN: - op_name_prefix = f"{const.ATEN}{const.ATEN_SEP}" - else: - op_name_prefix = f"{const.OPTIMIZER}.{const.OPTIMIZER_STEP}{const.OPTIMIZER_SEP}" - - return event_name.replace(op_name_prefix, "") - - def _format_rule_to_pattern(self, op_rule): - """ - Args: - op_rule: like (mul){0,1}-(add|neg){0,2}-dropout-(softmax)* - - Returns: op_pattern like (-mul-){0,1}(-add-|-neg-){0,2}(-dropout-)(-softmax-)* - """ - enable_regex = False - if "(" not in op_rule and ")" not in op_rule: - # op_rule which requires fuzzy matching mush consist of "()" - return op_rule, enable_regex - - enable_regex = True - op_pattern_list = op_rule.split(const.OP_SEP) - format_op_pattern = "" - for op_pattern in op_pattern_list: - matched_res = re.search(r'\((.*?)\)', op_pattern) - - ops_index_range = (matched_res.start() + 1, matched_res.end() - 1) if matched_res else ( - 0, len(op_pattern)) - - op_names = op_pattern[ops_index_range[0]: ops_index_range[1]] - tmp_op_names_record = [] - for op_name in op_names.split("|"): - tmp_op_names_record.append(f"{const.OP_SEP}{op_name.strip(' ')}{const.OP_SEP}") - op_suffix = op_pattern[ops_index_range[1] + 1:] - op_names_format = f"({'|'.join(tmp_op_names_record)}){op_suffix}" - - format_op_pattern += op_names_format - return format_op_pattern, enable_regex +import multiprocessing +import logging +import re + +from tqdm import tqdm + +from profiler.advisor.analyzer.base_analyzer import BaseAnalyzer +from profiler.advisor.common import constant as const +from profiler.advisor.common.timeline.event import TimelineEvent +from profiler.advisor.dataset.timeline_event_dataset import TimelineEventDataset +from profiler.advisor.result.item import OptimizeItem, OptimizeRecord +from profiler.advisor.utils.utils import format_timeline_result +from profiler.advisor.common.timeline.fusion_ops_db import init_timeline_ops_db + +logger = logging.getLogger() + + +class TimelineFusionOpsAnalyzer(BaseAnalyzer): + dataset_cls_list = [TimelineEventDataset] + + def __init__(self, collection_path, n_processes: int = 1, cann_version=const.DEFAULT_CANN_VERSION, + torch_version=const.DEFAULT_TORCH_VERSION, **kwargs): + super().__init__(collection_path, n_processes, cann_version, torch_version, **kwargs) + self._matched_op_index = {} if self.n_processes <= 1 else multiprocessing.Manager().dict() + self.matched_op_stacks = {} + self.empty_stacks = True 
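# The Manager().dict() chosen above is what allows matched op indices to be shared
# across worker processes; a plain dict passed to a child process is pickled, so the
# child's writes never reach the parent. A small standalone illustration of that
# difference (the worker function and key are hypothetical, not from this patch):
import multiprocessing


def record_match(shared_dict, rule_key):
    shared_dict[rule_key] = True                 # write from the child process


if __name__ == "__main__":
    shared = multiprocessing.Manager().dict()
    worker = multiprocessing.Process(target=record_match, args=(shared, "npu_api:add-mul"))
    worker.start()
    worker.join()
    print(dict(shared))                          # {'npu_api:add-mul': True}; a plain dict would stay empty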
+ key = TimelineEventDataset.get_key() + self.timeline_event_dataset = self.get_first_data_by_key(self.dataset_list, key) + + def optimize(self, **kwargs): + for mode in [const.ATEN.lower(), const.OPTIMIZER.lower()]: + + for op_combined, npu_apis in tqdm(getattr(init_timeline_ops_db(self.cann_version, self.torch_version), + f"_{mode}_op_api_map").items(), leave=False, ncols=100, + desc="Scanning timeline for affinity apis"): + for npu_api in npu_apis.split("/"): + self.find_fusion_ops(self.timeline_event_dataset, op_combined, npu_api, mode) + + self.query_stack(self.timeline_event_dataset) + + logger.info("Finish timeline analysis") + self.make_record() + self.make_render() + return self.result + + def find_fusion_ops(self, event_dataset, ops: str, npu_api: str, mode: str): + """ + :Param event_dataset: dataset of timeline event + :Param ops: operator combination with '-' as separator , e.g. permute-reshape + :Param npu_api: api of torch_npu, generally more efficient than torch api + :Param mode: aten or dequeue or optimizer + :Return: json of op_name and called times and detail stacks + """ + op_rule_pattern, enable_regex = self._format_rule_to_pattern(ops) + if not enable_regex: + self._match_ops(event_dataset, op_rule_pattern, npu_api, mode) + else: + try: + self._match_ops_with_regex(event_dataset, op_rule_pattern, npu_api, mode) + except Exception as e: + logger.warning("Failed to find fusion operators with regex %s, reason is %s", ops, e) + + def _match_ops(self, event_dataset, ops: str, npu_api: str, mode: str): + """ match operator based on fusion operators rule(without regex), + only strictly equals of op name list means matched + :Param event_dataset: dataset of timeline event + :Param ops: operator combination with '-' as separator , e.g. permute-reshape + :Param npu_api: api of torch_npu, generally more efficient than torch api + :Param mode: aten or dequeue or optimizer + """ + op_list = ops.split(const.OP_SEP) + + matched_op_index = set() + api_ops_matched = False + + for index, event in enumerate(getattr(event_dataset, mode)): + if self._replace_op_name_prefix(event.name, mode) != op_list[0]: + continue + tmp_dequeue_event_names = [self._replace_op_name_prefix(event.name, mode) for event in + getattr(event_dataset, mode)[index: index + len(op_list)]] + if tmp_dequeue_event_names != op_list: + continue + api_ops_matched = True + matched_op_index.add(event.dataset_index) + + if api_ops_matched: + self._matched_op_index[npu_api + f":{ops}"] = matched_op_index + + def _match_ops_with_regex(self, event_dataset, op_rule_pattern: str, npu_api: str, + mode: str): + """ match operator based on fusion operators rule(with regex), + using regex to support condition like 'a = torch.mul(xxx) if xxx else torch.add(xxx)' + :Param event_dataset: dataset of timeline event + :Param op_rule_pattern: fusion operators rule with regex definition , e.g. add-mul{0,10}, add-mul* + :Param npu_api: api of torch_npu, generally more efficient than torch api + :Param mode: aten or dequeue or optimizer + """ + matched_op_index = set() + total_op_name = "".join([f"{const.OP_SEP}{self._replace_op_name_prefix(event.name, mode)}{const.OP_SEP}" + for event in + getattr(event_dataset, mode)]) + + matched_pattern_index_tuple = [(x.start(0), x.end(0)) for x in re.finditer(op_rule_pattern, total_op_name)] + # convert list of index tuple to a whole list: [(3, 25), ...] -> [3, 25, ...] 
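# Worked toy example of the span flattening described in the comment above
# (values are illustrative): for events ["relu", "add", "mul", "div"] the
# concatenated name string is "-relu--add--mul--div-", and a rule such as
# "(add)-(mul)" becomes the pattern "(-add-)(-mul-)".
import re

total_op_name = "-relu--add--mul--div-"
spans = [(m.start(0), m.end(0)) for m in re.finditer("(-add-)(-mul-)", total_op_name)]
# spans == [(6, 16)]
split_points = [p for span in spans for p in span]     # -> [6, 16]
# after padding with 0 and len(total_op_name) as the code below does: [0, 6, 16, 21],
# i.e. the string is cut into "-relu-", "-add--mul-" (the matched piece) and "-div-"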
+ total_ops_split_points = [num for sublist in matched_pattern_index_tuple for num in sublist] + + api_ops_matched = len(total_ops_split_points) != 0 + + op_index = [] + if 0 not in total_ops_split_points: + total_ops_split_points = [0] + total_ops_split_points + if len(list(total_op_name)) not in total_ops_split_points: + total_ops_split_points.append(len(list(total_op_name))) + + # convert total ops name like "-add-mul-xxx-div-" to small pieces like [["add", "mul"], [...], ["div"]] + # by the regex index and then calculate the real index for matched fusion operators in event dataset + for l, r in zip(total_ops_split_points, total_ops_split_points[1:]): + matched_op_flag = True if (l, r) in matched_pattern_index_tuple else False + matched_ops_list = total_op_name[l: r].strip(const.OP_SEP).split(const.OP_SEP + const.OP_SEP) + op_index.append([matched_op_flag, len(matched_ops_list)]) + for i, _ in enumerate(op_index): + if i > 0: + # calculate cumsum for indexing matched operator + op_index[i][1] = op_index[i][1] + op_index[i - 1][1] + op_index = [[False, 0]] + op_index + + for i, _ in enumerate(op_index): + if not op_index[i][0]: + continue + index = op_index[i - 1][1] + matched_op_index.add(index) + + if index > len(getattr(event_dataset, mode)) - 1: + continue + dataset_index = getattr(event_dataset, mode)[index].get("dataset_index") + matched_op_index.add(dataset_index) + + if api_ops_matched: + self._matched_op_index[npu_api + f":{op_rule_pattern}"] = sorted(list(matched_op_index)) + + def make_record(self): + """ + make record for what and how to optimize + """ + if not self.matched_op_stacks: + return + + desc = f"Found {len(format_timeline_result(self.matched_op_stacks))} apis to be replaced" \ + f" based on the runtime env cann-{self.cann_version} and torch-{self.torch_version}" + suggestion = "Please replace training api according to sub table 'Affinity training api'" + if self.empty_stacks: + desc += ", but with no stack" + suggestion = const.TIMELINE_EMPTY_STACKS_PROMPT.format( + timeline_profiling_doc_url=const.TIMELINE_WITH_STACK_DOC_URL + ) + + optimization_item = OptimizeItem( + const.AFFINITY_TRAINING_API, + desc, + [suggestion] + ) + + self.result.add(OptimizeRecord(optimization_item)) + + record_title = ["Affinity API", "Code stacks", "Stack called counts"] + self.result.add_detail(const.AFFINITY_TRAINING_API, headers=record_title) + + for api_name, stacks_info in format_timeline_result(self.matched_op_stacks).items(): + if not stacks_info: + detail = [api_name, "null", "null"] + self.result.add_detail(const.AFFINITY_TRAINING_API, detail=detail) + else: + for stack in stacks_info: + detail = [api_name, *stack] + self.result.add_detail(const.AFFINITY_TRAINING_API, detail=detail) + + def make_render(self): + format_result_for_html = format_timeline_result(dict(self.matched_op_stacks), dump_html=True) + + self.html_render.render_template(key="schedule", + template_dir="templates", + template_name="affinity_api.html", + cann_version=self.cann_version, + torch_version=self.torch_version, + empty_stacks=self.empty_stacks, + with_stack_doc_url=const.TIMELINE_WITH_STACK_DOC_URL, + api_doc_url=const.TIMELINE_API_DOC_URL, + result=format_result_for_html) + + def query_stack(self, event_dataset): + if all([len(matched_index) == 0 for matched_index in self._matched_op_index.values()]): + return + + op_stack_list = event_dataset.parse_data_with_generator(self._query_stack_by_matched_index) + for op_stack in op_stack_list: + for op_rule, stack in op_stack.items(): + if op_rule not in 
self.matched_op_stacks: + self.matched_op_stacks[op_rule] = {} + if stack == const.TIMELINE_FUSION_OPS_NO_STACK_FLAG: + continue + if stack not in self.matched_op_stacks[op_rule]: + self.matched_op_stacks[op_rule][stack] = 0 + self.matched_op_stacks[op_rule][stack] += 1 + + def _query_stack_by_matched_index(self, index, event): + stack_record = {} + event = TimelineEvent(event) + + matched_op_rules = [] + for op_rule, matched_index in self._matched_op_index.items(): + if index not in matched_index: + continue + + matched_op_rules.append(op_rule) + stack = event.args.get(const.CALL_STACKS) + + if not stack: + logger.debug("Got empty '%s' for event %s", const.CALL_STACKS, event) + continue + + if self.empty_stacks and stack: + self.empty_stacks = False + + stack_record[op_rule] = stack + + if matched_op_rules and not stack_record: + for op_rule in matched_op_rules: + stack_record[op_rule] = const.TIMELINE_FUSION_OPS_NO_STACK_FLAG + + return stack_record + + def _replace_op_name_prefix(self, event_name, mode): + if mode == const.DEQUEUE.lower(): + op_name_prefix = f"{const.DEQUEUE}{const.DEQUEUE_SEP}" + elif mode == const.ATEN: + op_name_prefix = f"{const.ATEN}{const.ATEN_SEP}" + else: + op_name_prefix = f"{const.OPTIMIZER}.{const.OPTIMIZER_STEP}{const.OPTIMIZER_SEP}" + + return event_name.replace(op_name_prefix, "") + + def _format_rule_to_pattern(self, op_rule): + """ + Args: + op_rule: like (mul){0,1}-(add|neg){0,2}-dropout-(softmax)* + + Returns: op_pattern like (-mul-){0,1}(-add-|-neg-){0,2}(-dropout-)(-softmax-)* + """ + enable_regex = False + if "(" not in op_rule and ")" not in op_rule: + # op_rule which requires fuzzy matching mush consist of "()" + return op_rule, enable_regex + + enable_regex = True + op_pattern_list = op_rule.split(const.OP_SEP) + format_op_pattern = "" + for op_pattern in op_pattern_list: + matched_res = re.search(r'\((.*?)\)', op_pattern) + + ops_index_range = (matched_res.start() + 1, matched_res.end() - 1) if matched_res else ( + 0, len(op_pattern)) + + op_names = op_pattern[ops_index_range[0]: ops_index_range[1]] + tmp_op_names_record = [] + for op_name in op_names.split("|"): + tmp_op_names_record.append(f"{const.OP_SEP}{op_name.strip(' ')}{const.OP_SEP}") + op_suffix = op_pattern[ops_index_range[1] + 1:] + op_names_format = f"({'|'.join(tmp_op_names_record)}){op_suffix}" + + format_op_pattern += op_names_format + return format_op_pattern, enable_regex diff --git a/profiler/advisor/common/constant.py b/profiler/advisor/common/constant.py index df12fd76d3..664753c724 100644 --- a/profiler/advisor/common/constant.py +++ b/profiler/advisor/common/constant.py @@ -56,8 +56,8 @@ CANN_VERSION_C13 = '7.0.RC1' CANN_VERSION_C15 = '7.0.0' CANN_VERSION_C17 = '8.0.0' SUPPORTED_CANN_VERSION = [CANN_VERSION_C30, CANN_VERSION_C13, CANN_VERSION_C15, CANN_VERSION_C17] -DEFAULT_CANN_VERSION = CANN_VERSION_C15 -ASCEND_PYTORCH_PROFILER = "ascend_pytorch_proflier" +DEFAULT_CANN_VERSION = CANN_VERSION_C17 +ASCEND_PYTORCH_PROFILER = "ascend_pytorch_profiler" MSLITE = "mslite" MSPROF = "msprof" SUPPORTED_PROFILING_TYPE = [ASCEND_PYTORCH_PROFILER, MSLITE, MSPROF] @@ -83,12 +83,20 @@ ADVISOR_LOG_LEVEL = "ADVISOR_LOG_LEVEL" DEFAULT_LOG_LEVEL = "INFO" SUPPORTED_LOG_LEVEL = ["DEBUG", "INFO", "WARNING", "ERROR", "CRITICAL"] +RULE_BUCKET = "RULE-BUCKET" CLOUD_RULE_REGION_CN_NORTH_9 = "cn-north-9" CLOUD_RULE_REGION_CN_NORTH_7 = "cn-north-7" CLOUD_RULE_REGION_CN_SOUTHWEST_2 = "cn-southwest-2" CLOUD_RULE_REGION_LIST = [CLOUD_RULE_REGION_CN_NORTH_7, CLOUD_RULE_REGION_CN_NORTH_9, 
CLOUD_RULE_REGION_CN_SOUTHWEST_2] +INNER_REGION_LIST = [CLOUD_RULE_REGION_CN_NORTH_7] DEFAULT_CLOUD_RULE_REGION = CLOUD_RULE_REGION_CN_SOUTHWEST_2 +HTTP_PREFIXES = "http://" +HTTPS_PREFIXES = "https://" +COMMON_YAML_DIR = "modelarts/solution/ma_advisor_rules/" +COMMON_ENDPOINT_SUFFIX = "obs.{}.myhuaweicloud.com" +INNER_ENDPOINT_SUFFIX= "obs.{}.ulanqab.huawei.com" + AICPU_RULES_YAML_NAME = "aicpu_rules.yaml" FUSSION_PASS_YAML_NAME = "op_fussion_pass.yaml" TIMELINE_FUSION_OPS_YAML_NAME = "timeline_fusion_ops.yaml" diff --git a/profiler/advisor/common/timeline/fusion_ops_db.py b/profiler/advisor/common/timeline/fusion_ops_db.py index 19a86437e0..f37cfe50d1 100644 --- a/profiler/advisor/common/timeline/fusion_ops_db.py +++ b/profiler/advisor/common/timeline/fusion_ops_db.py @@ -1,330 +1,44 @@ -import copy import logging import os import yaml -from profiler.advisor.common import constant as const +from profiler.advisor.common import constant +from profiler.advisor.common.timeline.fusion_ops_rule import OpRule +from profiler.advisor.common.timeline.fusion_ops_rule_handler import TimelineOpRuleHandler from profiler.advisor.utils.log import get_log_level +from profiler.advisor.utils.utils import get_file_path_by_walk logger = logging.getLogger() logger.setLevel(get_log_level()) -class TimelineOpRuleHandler: - """基于线性规划思想保存OpRule,用于局部继承、全局继承等功能""" - - def __init__(self): - self._db_content = None - # 具体生成的timeline规则,key为unique_id - self._all_tmp_timeline_op_rule = {} - # 所有timeline规则的dict集合,key为unique_id - self._all_origin_timeline_op_rule_dict = {} - # 已生成timeline规则的id数组 - self._exist_timeline_op_rule_unique_id_list = [] - - @staticmethod - def _get_local_inherit_id_list(op_rule: dict): - local_inherit_id_list = [] - for _, val in op_rule.items(): - if val.get("inherit_unique_id") is not None: - local_inherit_id_list.append(val.get("inherit_unique_id")) - return local_inherit_id_list - - @staticmethod - def _is_duplicated_element_in_lists(list_a, list_b): - """检查两个数组中是否存在重复的元素,若有任意元素重复,返回True""" - if not isinstance(list_a, list): - list_a = [list_a] - if not isinstance(list_b, list): - list_b = [list_b] - for element in list_a: - if element in list_b: - return True - return False - - def set_db_content(self, db_content): - # 过滤非 dict 格式, 或 dict 中没有定义 unique_id 的数据, 并保存到 _all_origin_timeline_op_rule_dict 中 - self._db_content = copy.deepcopy(db_content) - for rule_dic in self._db_content: - if not isinstance(rule_dic, dict) or rule_dic.get("unique_id") is None: - continue - self._all_origin_timeline_op_rule_dict[rule_dic.get("unique_id")] = rule_dic - if self._all_origin_timeline_op_rule_dict: - self.generate_all_timeline_op_rule() - - def generate_basic_timeline_op_rules(self): - """用于实现获取无全局继承规则, 无全局继承的规则认为是基础版本规则, 默认不会存在局部继承""" - for _, rule_dic in self._all_origin_timeline_op_rule_dict.items(): - if rule_dic.get("inherit_unique_id") is None: - self.add_basic_timeline_op_rule(rule_dic) - - def add_basic_timeline_op_rule(self, rule_dic): - # 若基础规则中存在局部继承的规则,则跳过 - local_inherit_id_list = self._get_local_inherit_id_list(rule_dic.get("operator_rules")) - if local_inherit_id_list: - return - - temp_rule = OpRule() - temp_rule.merge(rule_dic.get("operator_rules")) - - unique_id = rule_dic.get("unique_id") - logger.debug("The rule of version %s is basic rule.", unique_id) - self.add_new_timeline_op_rule(unique_id, temp_rule.tmp_rule) - - def add_empty_timeline_op_rule(self, unique_id): - if self._all_origin_timeline_op_rule_dict.get(unique_id) is None: - self._all_origin_timeline_op_rule_dict[unique_id] 
= {} - tmp_rule = {} - logger.debug("The rule of version %s is empty.", unique_id) - self.add_new_timeline_op_rule(unique_id, tmp_rule) - - def add_new_timeline_op_rule(self, unique_id, tmp_rule): - if unique_id not in self._exist_timeline_op_rule_unique_id_list: - self._exist_timeline_op_rule_unique_id_list.append(unique_id) - self._all_tmp_timeline_op_rule[unique_id] = tmp_rule - logger.debug("The rule of version %s is successfully generated.", unique_id) - - def generate_specified_list_timeline_op_rule(self, specified_unique_id_list, kid_id_list=None): - for specified_unique_id in specified_unique_id_list: - if specified_unique_id in self._exist_timeline_op_rule_unique_id_list: - self.generate_specified_timeline_op_rule(specified_unique_id, kid_id_list) - - def generate_specified_timeline_op_rule(self, specified_unique_id, kid_id_list=None): - """用于实现生成特定版本规则 - - 若不存在相应specified_unique_id的规则、或是已生成、循环继承等情况,将该规则置空并返回 - 规则库文件结构设置为多叉树, 结构决定了不断向下搜索最终应该是从基础版本开始继承, 递归生成, - 直到specified_unique_id规则依赖继承的规则库全部生成完毕, 再生成该指定规则库, 将specified_unique_id的规则库归档 - - 参数: - specified_unique_id: 指定版本规则id - kid_id_list: 子规则id数组, 用于防止循环继承, 如间接继承自身或直接继承自身等情况 - 返回: - None - """ - if kid_id_list is None: - kid_id_list = [] - - # 若该unique_id规则在timeline_fusion_ops.yaml中没有相应的规则, 生成该id规则,置为空 - if self._all_origin_timeline_op_rule_dict.get(specified_unique_id) is None: - logger.warning("The specified version %s does not exist in the rule library. " - "Ensure that the corresponding rule is configured in the YAML file. " - "The version %s is left blank.", - specified_unique_id, - specified_unique_id) - self.add_empty_timeline_op_rule(specified_unique_id) - return - - # 若该unique_id规则已经生成,则无需再次生成 - if specified_unique_id in self._exist_timeline_op_rule_unique_id_list: - logger.warning("The rule has been generated and does not need to be generated again. " - "Check whether unique id %s in the YAML file is duplicate.", - specified_unique_id) - return - - # 若kid_id_list不为空,且间接继承自身,则尝试生成空规则用于继承 - if kid_id_list and self._is_duplicated_element_in_lists(specified_unique_id, kid_id_list): - logger.warning("It cannot be inherited indirectly. 
Ensure that the corresponding rules are correctly " - "configured in the YAML file and leave Version %s blank.", - specified_unique_id) - self.add_empty_timeline_op_rule(specified_unique_id) - return - - rule_dic = self._all_origin_timeline_op_rule_dict.get(specified_unique_id) - if rule_dic is not None: - kid_id_list.append(specified_unique_id) - - global_inherit_id = rule_dic.get("inherit_unique_id") - if global_inherit_id and global_inherit_id not in self._exist_timeline_op_rule_unique_id_list: - logger.debug("The rule of version %s global inherit the rule of version %s", - specified_unique_id, global_inherit_id) - self.generate_specified_timeline_op_rule(global_inherit_id, kid_id_list) - - # 若局部继承的规则未生成, 生成该规则 - local_inherit_id_list = self._get_local_inherit_id_list(rule_dic.get("operator_rules")) - if local_inherit_id_list: - logger.debug("The rule of version %s local inherit the rule of version %s", - specified_unique_id, local_inherit_id_list) - self.generate_specified_list_timeline_op_rule(specified_unique_id_list=local_inherit_id_list, - kid_id_list=kid_id_list) - logger.debug("Start to generate rule of version %s", specified_unique_id) - # 实现全局继承与局部继承 - temp_rule = OpRule(timeline_op_rule_handler=self, - rule=self._all_tmp_timeline_op_rule.get(global_inherit_id)) - temp_rule.merge(rule_dic.get("operator_rules")) - # 将生成的规则归档保存 - self.add_new_timeline_op_rule(specified_unique_id, temp_rule.tmp_rule) - return - logger.error("Failed to generate the rule whose unique_id is %s. Ensure that the rule is configured in " - "the YAML file and the version %s is empty.", specified_unique_id, specified_unique_id) - self.add_empty_timeline_op_rule(specified_unique_id) - - def generate_all_timeline_op_rule(self): - """用于实现获取所有版本规则 - - 查找db_content中的规则库, 规则库文件结构设置为多叉树, 优先生成无继承的基础规则版本 - 循环并生成其他版本, 文件结构决定了不断向下搜索最终应该是从基础版本开始继承, 递归生成,直到全部规则库生成后退出函数 - - 参数: - None - 返回: - None - """ - self.generate_basic_timeline_op_rules() - _unique_id_list = copy.deepcopy(list(self._all_origin_timeline_op_rule_dict.keys())) - for unique_id in _unique_id_list: - if unique_id in self._exist_timeline_op_rule_unique_id_list: - continue - self.generate_specified_timeline_op_rule(unique_id) - - def get_tmp_timeline_op_rule_with_unique_id(self, unique_id): - if unique_id not in self._exist_timeline_op_rule_unique_id_list: - logger.error("The specified unique_id does not exist in the rule library. Ensure that the " - "corresponding rule is configured in the YAML file and the version %s is empty." - "If the value of unique_id is a negative number, the version may not be supported.", - unique_id) - self.add_empty_timeline_op_rule(unique_id) - if unique_id < 0: - logger.error("Advise to use a positive integer as the unique id of rules. " - "Negative numbers: %s are not recommended to use as unique id. 
" - "If specified invalid unique id: %s is used, an empty rule is returned by default.", - unique_id, const.TIMELINE_FUSION_OPS_INVALID_UNIQUE_ID) - return self._all_tmp_timeline_op_rule.get(unique_id) - - -class OpRule: - - def __init__(self, rule=None, timeline_op_rule_handler=None): - if rule is None: - self._tmp_rule = {} - else: - self._tmp_rule = copy.deepcopy(rule) - if timeline_op_rule_handler is None: - self.timeline_op_rule_handler = {} - else: - self.timeline_op_rule_handler = copy.deepcopy(timeline_op_rule_handler) - self._rule = {} - - @property - def tmp_rule(self): - return self._tmp_rule - - @staticmethod - def _format_rule(rule): - """格式化规则函数, 将额外规则格式化为{key,数组list}形式, 使得yaml文件中operator_rules若写成key:str形式也能正常读取""" - format_rule = {} - for key, val in rule.items(): - if not isinstance(val, list): - val = [val] - format_rule[key] = val - return format_rule - - def merge(self, extra_rule): - """合并函数, 将已有规则库与额外规则合并, 若无继承则已有规则库应为空""" - for key, val in extra_rule.items(): - for func, op_rules in val.items(): - try: - getattr(self, f"{func}")(key, op_rules) - except AttributeError: - logger.error("Undefined field and function name. Ensure that %s is correct in the rule " - "library.", func) - - def get_final_rules(self): - """获取最终的规则库""" - self._restore_rule() - return self._rule - - def add(self, key, add_rules: dict): - """新增函数, 新增已有规则库不存在的额外规则""" - if add_rules is None: - return - if self._tmp_rule.get(key) is None: - self._tmp_rule[key] = {} - format_add_rule = self._format_rule(add_rules) - for add_key, add_val in format_add_rule.items(): - logger.debug("add: %s: %s", add_key, add_val) - if add_key not in self._tmp_rule: - self._tmp_rule[key][add_key] = add_val - else: - logger.warning("This key has been written to the rule, " - "%s: %s should be written in the overwrite section", add_key, add_val) - self._tmp_rule[key][add_key].update(add_val) - - def overwrite(self, key, overwrite_rules: dict): - """重写函数, 重写已有规则库中已经存在的规则""" - if overwrite_rules is None: - return - if self._tmp_rule.get(key) is None: - self._tmp_rule[key] = {} - format_overwrite_rules = self._format_rule(overwrite_rules) - for overwrite_key, overwrite_val in format_overwrite_rules.items(): - logger.debug("overwrite: %s: %s", overwrite_key, overwrite_val) - if overwrite_key not in self._tmp_rule: - logger.warning("This key is not written to the rule. " - "%s: %s should be written in the add section", overwrite_key, overwrite_val) - self._tmp_rule[key][overwrite_key] = overwrite_val - else: - self._tmp_rule[key][overwrite_key].update(overwrite_val) - - def exclude(self, key, exclude_rules: list): - """除外函数, 将已有规则库已有的规则除外删除""" - if exclude_rules is None: - return - for exclude_key in exclude_rules: - logger.debug("exclude: %s", exclude_key) - if isinstance(exclude_key, str): - if exclude_key not in self._tmp_rule[key]: - logger.warning("This key is not written to the rule. " - "do not need to exclude: %s.", exclude_key) - continue - self._tmp_rule[key].pop(exclude_key) - else: - logger.warning("Error type rule in exclude: %s", exclude_key) - - def inherit_unique_id(self, key, inherit_unique_id): - """局部继承函数, 将规则库中指定unique_id版本覆盖指定位置""" - result_rule = self.timeline_op_rule_handler.get_tmp_timeline_op_rule_with_unique_id(inherit_unique_id) - if result_rule is not None and result_rule.get(key) is not None: - self._tmp_rule[key] = copy.deepcopy(result_rule.get(key)) - return - logger.error("Rule library version %s does not exist. 
", inherit_unique_id) - - def _restore_rule(self): - for key, op_api_map in self._tmp_rule.items(): - self._rule[key] = [{op_combined: api} for op_combined, api in op_api_map.items()] - +def init_timeline_ops_db(cann_version=None, torch_version=None): + logger.debug("init operators database") -def get_file_path_by_walk(root, filename): - file_path = "" - for root, _, files in os.walk(root, topdown=True): - for name in files: - if name == filename: - file_path = os.path.join(root, name) - return file_path - return file_path + return FusionOperatorDB(cann_version=cann_version, torch_version=torch_version) def get_timeline_fusion_ops_yaml_path(): # 环境变量 ADVISOR_RULE_PATH 不为空且该路径存在, os.walk遍历其下文件, 若存在相应的规则文件则返回路径 - advisor_rule_path = os.getenv(const.ADVISOR_RULE_PATH) + advisor_rule_path = os.getenv(constant.ADVISOR_RULE_PATH) if advisor_rule_path and os.path.exists(advisor_rule_path): - specified_file_path = get_file_path_by_walk(advisor_rule_path, const.TIMELINE_FUSION_OPS_YAML_NAME) + specified_file_path = get_file_path_by_walk(advisor_rule_path, constant.TIMELINE_FUSION_OPS_YAML_NAME) if len(specified_file_path.strip()) and os.path.exists(specified_file_path): logger.debug("Successfully find The %s file which is specified by the environment variable: %s.", - specified_file_path, const.ADVISOR_RULE_PATH) + specified_file_path, constant.ADVISOR_RULE_PATH) return specified_file_path logger.warning("The %s does not exist in path: %s. Try to use cloud or default local YAML file.", - const.TIMELINE_FUSION_OPS_YAML_NAME, os.path.normpath(advisor_rule_path)) + constant.TIMELINE_FUSION_OPS_YAML_NAME, os.path.normpath(advisor_rule_path)) # 检查云文件默认保存路径文件夹下是否存在相应文件, 默认路径 ~/rules/cloud/ - cloud_file_path = os.path.join(os.path.expanduser("~"), const.CLOUD_RULE_PATH, const.TIMELINE_FUSION_OPS_YAML_NAME) + cloud_file_path = os.path.join(os.path.expanduser("~"), constant.CLOUD_RULE_PATH, constant.TIMELINE_FUSION_OPS_YAML_NAME) if os.path.exists(cloud_file_path): - logger.debug("Successfully find The cloud %s file in %s.", const.TIMELINE_FUSION_OPS_YAML_NAME, + logger.debug("Successfully find The cloud %s file in %s.", constant.TIMELINE_FUSION_OPS_YAML_NAME, cloud_file_path) return cloud_file_path # 检查本地默认文件 local_file_path = os.path.join(os.path.dirname(os.path.dirname(os.path.dirname(os.path.realpath(__file__)))), - const.DEFAULT_RULE_PATH, const.TIMELINE_FUSION_OPS_YAML_NAME) + constant.DEFAULT_RULE_PATH, constant.TIMELINE_FUSION_OPS_YAML_NAME) if not os.path.exists(local_file_path): # 若本地默认文件不存在, 则log异常信息并 logger.error("The default local YAML file does not exist. 
Please check the YAML file in the default path %s.", @@ -337,8 +51,8 @@ class FusionOperatorDB: def __init__(self, file_path=None, cann_version=None, torch_version=None): self.timeline_fusion_ops_yaml_path = os.path.normpath(get_timeline_fusion_ops_yaml_path()) - self.cann_version = cann_version or const.DEFAULT_CANN_VERSION - self.torch_version = torch_version or const.DEFAULT_TORCH_VERSION + self.cann_version = cann_version or constant.DEFAULT_CANN_VERSION + self.torch_version = torch_version or constant.DEFAULT_TORCH_VERSION self._supported_version_dict = {} @@ -379,9 +93,9 @@ class FusionOperatorDB: return self._optimizer_op_api_map def get_fusion_operator_with_unique_id(self, unique_id): - if unique_id == const.TIMELINE_FUSION_OPS_INVALID_UNIQUE_ID: + if unique_id == constant.TIMELINE_FUSION_OPS_INVALID_UNIQUE_ID: logger.warning("The specified unique id: %s is invalid.Please check whether the rule of the unique id " - "exists and modify the rule.", const.TIMELINE_FUSION_OPS_INVALID_UNIQUE_ID) + "exists and modify the rule.", constant.TIMELINE_FUSION_OPS_INVALID_UNIQUE_ID) return {} result_tmp_rule = self.timeline_op_rule_handler.get_tmp_timeline_op_rule_with_unique_id(unique_id) result_op_rule = OpRule(result_tmp_rule) @@ -411,7 +125,7 @@ class FusionOperatorDB: def _is_version_supported(self, db_content): """校验当前版本是否被规则库中的版本支持, 保存版本支持信息数组, 按数组或字符串的可变方式保存""" - if db_content is None : + if db_content is None: logger.warning( "The rule library is empty. Check the rule library file: %s", self.timeline_fusion_ops_yaml_path @@ -455,18 +169,18 @@ class FusionOperatorDB: def _is_version_supported_in_supported_version_dict(self, cann_version=None, torch_version=None): """校验当前版本是否存在在规则库中的版本支持字典中""" for _, supported_version in self._supported_version_dict.items(): - if self._is_version_supported_in_version(supported_version, cann_version, torch_version): + if self._is_version_supported_in_versions(supported_version, cann_version, torch_version): return True return False def _get_unique_id_in_supported_version_dict(self, cann_version=None, torch_version=None) -> int: """校验当前版本是否存在在规则库中的版本支持字典中, 在使用前请检查是否支持该版本""" for key_unique_id, supported_version in self._supported_version_dict.items(): - if self._is_version_supported_in_version(supported_version, cann_version, torch_version): + if self._is_version_supported_in_versions(supported_version, cann_version, torch_version): return key_unique_id - return const.TIMELINE_FUSION_OPS_INVALID_UNIQUE_ID + return constant.TIMELINE_FUSION_OPS_INVALID_UNIQUE_ID - def _is_version_supported_in_version(self, supported_version, cann_version=None, torch_version=None): + def _is_version_supported_in_versions(self, supported_version, cann_version=None, torch_version=None): """校验当前cann版本和torch版本是否存在在规则库中的版本支持数组的元素中""" cann_version_list = supported_version[0] if not isinstance(cann_version_list, list): @@ -485,9 +199,9 @@ class FusionOperatorDB: def _parse_db(self): """生成输出的规则库""" - self._parse(const.ATEN) - self._parse(const.DEQUEUE) - self._parse(const.OPTIMIZER) + self._parse(constant.ATEN) + self._parse(constant.DEQUEUE) + self._parse(constant.OPTIMIZER) def _parse(self, mode): """生成输出的规则库中指定部分, 如aten, Optimizer等""" @@ -521,7 +235,7 @@ class FusionOperatorDB: if not os.path.exists(file_path): logger.warning("Path: '%s' does not exist, please specific existed path of " "fusion operators yaml file by setting env '%s'", - os.path.abspath(file_path), const.ADVISOR_RULE_PATH) + os.path.abspath(file_path), constant.ADVISOR_RULE_PATH) self.is_empty = True return {} diff 
--git a/profiler/advisor/common/timeline/fusion_ops_rule.py b/profiler/advisor/common/timeline/fusion_ops_rule.py new file mode 100644 index 0000000000..deee68edb9 --- /dev/null +++ b/profiler/advisor/common/timeline/fusion_ops_rule.py @@ -0,0 +1,110 @@ +# Copyright (c) Huawei Technologies Co., Ltd. 2024-2024. All rights reserved. +import copy +import logging + +from profiler.advisor.utils.log import get_log_level + +logger = logging.getLogger() +logger.setLevel(get_log_level()) + + +class OpRule: + + def __init__(self, rule=None, timeline_op_rule_handler=None): + if rule is None: + self._tmp_rule = {} + else: + self._tmp_rule = copy.deepcopy(rule) + if timeline_op_rule_handler is None: + self.timeline_op_rule_handler = {} + else: + self.timeline_op_rule_handler = copy.deepcopy(timeline_op_rule_handler) + self._rule = {} + + @property + def tmp_rule(self): + return self._tmp_rule + + @staticmethod + def _format_rule(rule): + """格式化规则函数, 将额外规则格式化为{key,数组list}形式, 使得yaml文件中operator_rules若写成key:str形式也能正常读取""" + format_rule = {} + for key, val in rule.items(): + if not isinstance(val, list): + val = [val] + format_rule[key] = val + return format_rule + + def merge(self, extra_rule): + """合并函数, 将已有规则库与额外规则合并, 若无继承则已有规则库应为空""" + for key, val in extra_rule.items(): + for func, op_rules in val.items(): + try: + getattr(self, f"{func}")(key, op_rules) + except AttributeError: + logger.error("Undefined field and function name. Ensure that %s is correct in the rule " + "library.", func) + + def get_final_rules(self): + """获取最终的规则库""" + self._restore_rule() + return self._rule + + def add(self, key, add_rules: dict): + """新增函数, 新增已有规则库不存在的额外规则""" + if add_rules is None: + return + if self._tmp_rule.get(key) is None: + self._tmp_rule[key] = {} + format_add_rule = self._format_rule(add_rules) + for add_key, add_val in format_add_rule.items(): + logger.debug("add: %s: %s", add_key, add_val) + if add_key not in self._tmp_rule: + self._tmp_rule[key][add_key] = add_val + else: + logger.warning("This key has been written to the rule, " + "%s: %s should be written in the overwrite section", add_key, add_val) + self._tmp_rule[key][add_key].update(add_val) + + def overwrite(self, key, overwrite_rules: dict): + """重写函数, 重写已有规则库中已经存在的规则""" + if overwrite_rules is None: + return + if self._tmp_rule.get(key) is None: + self._tmp_rule[key] = {} + format_overwrite_rules = self._format_rule(overwrite_rules) + for overwrite_key, overwrite_val in format_overwrite_rules.items(): + logger.debug("overwrite: %s: %s", overwrite_key, overwrite_val) + if overwrite_key not in self._tmp_rule: + logger.warning("This key is not written to the rule. " + "%s: %s should be written in the add section", overwrite_key, overwrite_val) + self._tmp_rule[key][overwrite_key] = overwrite_val + else: + self._tmp_rule[key][overwrite_key].update(overwrite_val) + + def exclude(self, key, exclude_rules: list): + """除外函数, 将已有规则库已有的规则除外删除""" + if exclude_rules is None: + return + for exclude_key in exclude_rules: + logger.debug("exclude: %s", exclude_key) + if isinstance(exclude_key, str): + if exclude_key not in self._tmp_rule[key]: + logger.warning("This key is not written to the rule. 
" + "do not need to exclude: %s.", exclude_key) + continue + self._tmp_rule[key].pop(exclude_key) + else: + logger.warning("Error type rule in exclude: %s", exclude_key) + + def inherit_unique_id(self, key, inherit_unique_id): + """局部继承函数, 将规则库中指定unique_id版本覆盖指定位置""" + result_rule = self.timeline_op_rule_handler.get_tmp_timeline_op_rule_with_unique_id(inherit_unique_id) + if result_rule is not None and result_rule.get(key) is not None: + self._tmp_rule[key] = copy.deepcopy(result_rule.get(key)) + return + logger.error("Rule library version %s does not exist. ", inherit_unique_id) + + def _restore_rule(self): + for key, op_api_map in self._tmp_rule.items(): + self._rule[key] = [{op_combined: api} for op_combined, api in op_api_map.items()] diff --git a/profiler/advisor/common/timeline/fusion_ops_rule_handler.py b/profiler/advisor/common/timeline/fusion_ops_rule_handler.py new file mode 100644 index 0000000000..b0558cca6d --- /dev/null +++ b/profiler/advisor/common/timeline/fusion_ops_rule_handler.py @@ -0,0 +1,193 @@ +# Copyright (c) Huawei Technologies Co., Ltd. 2024-2024. All rights reserved. +import copy +import logging + +from profiler.advisor.common import constant +from profiler.advisor.common.timeline.fusion_ops_rule import OpRule +from profiler.advisor.utils.log import get_log_level + +logger = logging.getLogger() +logger.setLevel(get_log_level()) + + +class TimelineOpRuleHandler: + """基于线性规划思想保存OpRule,用于局部继承、全局继承等功能""" + + def __init__(self): + self._db_content = None + # 具体生成的timeline规则,key为unique_id + self._all_tmp_timeline_op_rule = {} + # 所有timeline规则的dict集合,key为unique_id + self._all_origin_timeline_op_rule_dict = {} + # 已生成timeline规则的id数组 + self._exist_timeline_op_rule_unique_id_list = [] + + @staticmethod + def _get_local_inherit_id_list(op_rule: dict): + local_inherit_id_list = [] + for _, val in op_rule.items(): + if val.get("inherit_unique_id") is not None: + local_inherit_id_list.append(val.get("inherit_unique_id")) + return local_inherit_id_list + + @staticmethod + def _is_duplicated_element_in_lists(list_a, list_b): + """检查两个数组中是否存在重复的元素,若有任意元素重复,返回True""" + if not isinstance(list_a, list): + list_a = [list_a] + if not isinstance(list_b, list): + list_b = [list_b] + # 将两个数组合并为一个列表,使用集合(set)判断列表中是否存在重复元素 + combined_list = list_a + list_b + if len(combined_list) != len(set(combined_list)): + return True + return False + + def set_db_content(self, db_content): + # 过滤非 dict 格式, 或 dict 中没有定义 unique_id 的数据, 并保存到 _all_origin_timeline_op_rule_dict 中 + self._db_content = copy.deepcopy(db_content) + for rule_dic in self._db_content: + if not isinstance(rule_dic, dict) or rule_dic.get("unique_id") is None: + continue + self._all_origin_timeline_op_rule_dict[rule_dic.get("unique_id")] = rule_dic + if self._all_origin_timeline_op_rule_dict: + self.generate_all_timeline_op_rule() + + def generate_basic_timeline_op_rules(self): + """用于实现获取无全局继承规则, 无全局继承的规则认为是基础版本规则, 默认不会存在局部继承""" + for _, rule_dic in self._all_origin_timeline_op_rule_dict.items(): + if rule_dic.get("inherit_unique_id") is None: + self.add_basic_timeline_op_rule(rule_dic) + + def add_basic_timeline_op_rule(self, rule_dic): + # 若基础规则中存在局部继承的规则,则跳过 + local_inherit_id_list = self._get_local_inherit_id_list(rule_dic.get("operator_rules")) + if local_inherit_id_list: + return + + temp_rule = OpRule() + temp_rule.merge(rule_dic.get("operator_rules")) + + unique_id = rule_dic.get("unique_id") + logger.debug("The rule of version %s is basic rule.", unique_id) + self.add_new_timeline_op_rule(unique_id, temp_rule.tmp_rule) + + 
def add_empty_timeline_op_rule(self, unique_id): + if self._all_origin_timeline_op_rule_dict.get(unique_id) is None: + self._all_origin_timeline_op_rule_dict[unique_id] = {} + tmp_rule = {} + logger.debug("The rule of version %s is empty.", unique_id) + self.add_new_timeline_op_rule(unique_id, tmp_rule) + + def add_new_timeline_op_rule(self, unique_id, tmp_rule): + if unique_id not in self._exist_timeline_op_rule_unique_id_list: + self._exist_timeline_op_rule_unique_id_list.append(unique_id) + self._all_tmp_timeline_op_rule[unique_id] = tmp_rule + logger.debug("The rule of version %s is successfully generated.", unique_id) + + def generate_specified_list_timeline_op_rule(self, specified_unique_id_list, kid_id_list=None): + for specified_unique_id in specified_unique_id_list: + if specified_unique_id in self._exist_timeline_op_rule_unique_id_list: + self.generate_specified_timeline_op_rule(specified_unique_id, kid_id_list) + + def generate_specified_timeline_op_rule(self, specified_unique_id, kid_id_list=None): + """用于实现生成特定版本规则 + + 若不存在相应specified_unique_id的规则、或是已生成、循环继承等情况,将该规则置空并返回 + 规则库文件结构设置为多叉树, 结构决定了不断向下搜索最终应该是从基础版本开始继承, 递归生成, + 直到specified_unique_id规则依赖继承的规则库全部生成完毕, 再生成该指定规则库, 将specified_unique_id的规则库归档 + + 参数: + specified_unique_id: 指定版本规则id + kid_id_list: 子规则id数组, 用于防止循环继承, 如间接继承自身或直接继承自身等情况 + 返回: + None + """ + if kid_id_list is None: + kid_id_list = [] + + # 若该unique_id规则在timeline_fusion_ops.yaml中没有相应的规则, 生成该id规则,置为空 + if self._all_origin_timeline_op_rule_dict.get(specified_unique_id) is None: + logger.warning("The specified version %s does not exist in the rule library. " + "Ensure that the corresponding rule is configured in the YAML file. " + "The version %s is left blank.", + specified_unique_id, + specified_unique_id) + self.add_empty_timeline_op_rule(specified_unique_id) + return + + # 若该unique_id规则已经生成,则无需再次生成 + if specified_unique_id in self._exist_timeline_op_rule_unique_id_list: + logger.warning("The rule has been generated and does not need to be generated again. " + "Check whether unique id %s in the YAML file is duplicate.", + specified_unique_id) + return + + # 若kid_id_list不为空,且间接继承自身,则尝试生成空规则用于继承 + if kid_id_list and self._is_duplicated_element_in_lists(specified_unique_id, kid_id_list): + logger.warning("It cannot be inherited indirectly. 
Ensure that the corresponding rules are correctly " + "configured in the YAML file and leave Version %s blank.", + specified_unique_id) + self.add_empty_timeline_op_rule(specified_unique_id) + return + + rule_dic = self._all_origin_timeline_op_rule_dict.get(specified_unique_id) + if rule_dic is not None: + kid_id_list.append(specified_unique_id) + + global_inherit_id = rule_dic.get("inherit_unique_id") + if global_inherit_id and global_inherit_id not in self._exist_timeline_op_rule_unique_id_list: + logger.debug("The rule of version %s global inherit the rule of version %s", + specified_unique_id, global_inherit_id) + self.generate_specified_timeline_op_rule(global_inherit_id, kid_id_list) + + # 若局部继承的规则未生成, 生成该规则 + local_inherit_id_list = self._get_local_inherit_id_list(rule_dic.get("operator_rules")) + if local_inherit_id_list: + logger.debug("The rule of version %s local inherit the rule of version %s", + specified_unique_id, local_inherit_id_list) + self.generate_specified_list_timeline_op_rule(specified_unique_id_list=local_inherit_id_list, + kid_id_list=kid_id_list) + logger.debug("Start to generate rule of version %s", specified_unique_id) + # 实现全局继承与局部继承 + temp_rule = OpRule(timeline_op_rule_handler=self, + rule=self._all_tmp_timeline_op_rule.get(global_inherit_id)) + temp_rule.merge(rule_dic.get("operator_rules")) + # 将生成的规则归档保存 + self.add_new_timeline_op_rule(specified_unique_id, temp_rule.tmp_rule) + return + logger.error("Failed to generate the rule whose unique_id is %s. Ensure that the rule is configured in " + "the YAML file and the version %s is empty.", specified_unique_id, specified_unique_id) + self.add_empty_timeline_op_rule(specified_unique_id) + + def generate_all_timeline_op_rule(self): + """用于实现获取所有版本规则 + + 查找db_content中的规则库, 规则库文件结构设置为多叉树, 优先生成无继承的基础规则版本 + 循环并生成其他版本, 文件结构决定了不断向下搜索最终应该是从基础版本开始继承, 递归生成,直到全部规则库生成后退出函数 + + 参数: + None + 返回: + None + """ + self.generate_basic_timeline_op_rules() + _unique_id_list = copy.deepcopy(list(self._all_origin_timeline_op_rule_dict.keys())) + for unique_id in _unique_id_list: + if unique_id in self._exist_timeline_op_rule_unique_id_list: + continue + self.generate_specified_timeline_op_rule(unique_id) + + def get_tmp_timeline_op_rule_with_unique_id(self, unique_id): + if unique_id not in self._exist_timeline_op_rule_unique_id_list: + logger.error("The specified unique_id does not exist in the rule library. Ensure that the " + "corresponding rule is configured in the YAML file and the version %s is empty." + "If the value of unique_id is a negative number, the version may not be supported.", + unique_id) + self.add_empty_timeline_op_rule(unique_id) + if unique_id < 0: + logger.error("Advise to use a positive integer as the unique id of rules. " + "Negative numbers: %s are not recommended to use as unique id. 
" + "If specified invalid unique id: %s is used, an empty rule is returned by default.", + unique_id, constant.TIMELINE_FUSION_OPS_INVALID_UNIQUE_ID) + return self._all_tmp_timeline_op_rule.get(unique_id) diff --git a/profiler/advisor/config/config.ini b/profiler/advisor/config/config.ini index b8f6703685..7185a6b3bf 100644 --- a/profiler/advisor/config/config.ini +++ b/profiler/advisor/config/config.ini @@ -9,8 +9,8 @@ tune_ops_file = operator_tuning_file.cfg [THRESHOLD] # operator_bound_ratio: (mte, cube, vector, scalar) ratio greater than this value will be checked in operator_bound_checker operator_bound_ratio = 0.8 -[RULE] +[RULE-BUCKET] # region : URL of different regions where can download rule yaml file -cn-north-9 = https://cnnorth9-modelarts-sdk.obs.cn-north-9.myhuaweicloud.com/modelarts/solution/ma_advisor_rules/ -cn-southwest-2 = https://cnsouthwest2-modelarts-sdk.obs.cn-southwest-2.myhuaweicloud.com/modelarts/solution/ma_advisor_rules/ -cn-north-7 = https://cnnorth7-modelarts-sdk.obs.cn-north-7.ulanqab.huawei.com/modelarts/solution/ma_advisor_rules/ \ No newline at end of file +cn-north-9 = cnnorth9-modelarts-sdk +cn-southwest-2 = cnsouthwest2-modelarts-sdk +cn-north-7 = cnnorth7-modelarts-sdk \ No newline at end of file diff --git a/profiler/advisor/dataset/cluster/cluster_dataset.py b/profiler/advisor/dataset/cluster/cluster_dataset.py index ee8b3563b7..4db50464ef 100644 --- a/profiler/advisor/dataset/cluster/cluster_dataset.py +++ b/profiler/advisor/dataset/cluster/cluster_dataset.py @@ -2,6 +2,7 @@ import logging import os +from profiler.advisor.dataset.dataset import Dataset from profiler.advisor.utils.utils import singleton from profiler.cluster_analyse.common_func.file_manager import FileManager from profiler.advisor.common import constant as const @@ -13,10 +14,10 @@ from profiler.advisor.dataset.cluster.cluster_step_trace_time_bean import Cluste logger = logging.getLogger() -class ClusterDataset: +class ClusterDataset(Dataset): - def __init__(self, collection_path, **kwargs) -> None: - self.collection_path = os.path.realpath(collection_path) + def __init__(self, collection_path, data: dict, **kwargs) -> None: + super().__init__(collection_path, data) if not self.is_cluster_analysis_output_exist(): self.cluster_analyze() @@ -64,14 +65,14 @@ class ClusterDataset: class ClusterStepTraceTimeDataSet(ClusterDataset): RANK = "rank" - def __init__(self, collection_path: str, kwargs: dict = None): - super().__init__(collection_path) + def __init__(self, collection_path: str, data: dict, kwargs: dict = None): self._step_dict = defaultdict() - self.parse() + super().__init__(collection_path, data) - def parse(self): + def _parse(self): step_data = self.load_csv_data(const.CLUSTER_STEP_TIME_CSV, ClusterStepTraceTimeBean) self._step_dict = self.formate_data(step_data) + return True def formate_data(self, step_data: list): step_dict = defaultdict(lambda: [0, 0, 0]) @@ -100,15 +101,14 @@ class ClusterCommunicationDataSet(ClusterDataset): SDMA = "SDMA" RDMA = "RDMA" - def __init__(self, collection_path: str, kwargs: dict = None): - super().__init__(collection_path) + def __init__(self, collection_path: str, data: dict, kwargs: dict = None): self.rank_bw_dict = defaultdict(lambda: { self.RDMA_TIME_MS: 0, self.RDMA_SIZE_MB: 0, self.SDMA_TIME_MS: 0, self.SDMA_SIZE_MB: 0, }) - self.parse() + super().__init__(collection_path, data) @staticmethod def compute_ratio(dividend: float, divisor: float): @@ -117,9 +117,10 @@ class ClusterCommunicationDataSet(ClusterDataset): else: return 
round(dividend / divisor, 4) - def parse(self): + def _parse(self): communication_json = self.load_json_data(const.CLUSTER_COMM_JSON) self.process(communication_json) + return True def process(self, communication_json: dict): for comm_group, group_dict in communication_json.items(): diff --git a/profiler/advisor/dataset/dataset.py b/profiler/advisor/dataset/dataset.py new file mode 100644 index 0000000000..7f1e40a38b --- /dev/null +++ b/profiler/advisor/dataset/dataset.py @@ -0,0 +1,38 @@ +""" +dataset module +""" +import logging +import os + +from profiler.advisor.config.config import Config + +logger = logging.getLogger() + + +class Dataset: + """ + :param collection_path: dataSet absolute path + dataset base class + """ + + def __init__(self, collection_path, data=None) -> None: + if data is None: + data = {} + self.collection_path = os.path.abspath(os.path.join(Config().work_path, collection_path)) + logger.debug("init %s with %s", self.__class__.__name__, self.collection_path) + if self._parse(): + key = self.get_key() + if key not in data: + data[key] = [] + data[key].append(self) + + def _parse(self): + return None + + @classmethod + def get_key(cls): + """ + get key of dataset + :return: key + """ + return cls.__name__.rsplit('.', maxsplit=1)[-1] diff --git a/profiler/advisor/dataset/timeline_event_dataset.py b/profiler/advisor/dataset/timeline_event_dataset.py index c1134a9784..9b4c102dff 100644 --- a/profiler/advisor/dataset/timeline_event_dataset.py +++ b/profiler/advisor/dataset/timeline_event_dataset.py @@ -2,6 +2,7 @@ import logging from typing import List import ijson +from profiler.advisor.dataset.dataset import Dataset from tqdm import tqdm from profiler.advisor.common import constant as const @@ -13,17 +14,17 @@ logger = logging.getLogger() @singleton -class TimelineEventDataset: +class TimelineEventDataset(Dataset): - def __init__(self, root_dir, **kwargs) -> None: + def __init__(self, collection_path, data: dict, **kwargs) -> None: self._ops_with_task_type = {} self._ops_with_stack = {} self._torch_to_npu = {} self._acl_to_npu = set() self._aten: List[str] = [] self._optimizer: List[str] = [] - self.timeline_dir = root_dir - self.timeline_data_list = get_file_path_from_directory(root_dir, lambda file: file.endswith("trace_view.json")) + self.timeline_dir = collection_path + self.timeline_data_list = get_file_path_from_directory(collection_path, lambda file: file.endswith("trace_view.json")) self.dataset_len = None self.analysis_mode = kwargs.get("analysis_mode") self.task_type = kwargs.get("task_type") @@ -34,13 +35,14 @@ class TimelineEventDataset: logger.info("Load fusion operators database for cann version '%s' and torch version '%s'", self.cann_version, self.torch_version) - self.parse() + super().__init__(collection_path, data) if self.analysis_mode in ["op_stack", "all"]: self._task_op_names = list(set([event_key.split("-")[0] for event_key in self._ops_with_task_type.keys()])) self._post_process() + @property def ops_with_stack(self): return self._ops_with_stack @@ -69,23 +71,15 @@ class TimelineEventDataset: def aten(self): return self._aten - @classmethod - def get_key(cls): - """ - get key of dataset - :return: key - """ - return cls.__module__.rsplit('.', maxsplit=1)[-1] - - def parse(self): + def _parse(self): if len(self.timeline_data_list) == 0: logger.warning("Please ensure trace_view.json in %s, skip timeline analysis.", self.timeline_dir) return False if len(self.timeline_data_list) > 1: - logger.warning("Please ensure only one trace_view.json in %s, 
skip timeline analysis.", self.timeline_dir) - return False + logger.warning("Please ensure only one trace_view.json in %s, there will analysis first timeline profiling data.", self.timeline_dir) + self.timeline_data_list = [self.timeline_data_list[0]] result = self.parse_data_with_generator(self._add_event) diff --git a/profiler/advisor/interface/interface.py b/profiler/advisor/interface/interface.py index 19da350a02..156922f4d1 100644 --- a/profiler/advisor/interface/interface.py +++ b/profiler/advisor/interface/interface.py @@ -1,47 +1,44 @@ import os -from profiler.advisor.analyzer.scheduling.fusion_ops.fusion_ops_analyzer import TimelineFusionOpsAnalyzer -from profiler.advisor.analyzer.overall.overall_analyzer import OverallSummaryAnalyzer -from profiler.advisor.dataset.timeline_event_dataset import TimelineEventDataset +from profiler.advisor.analyzer.schedule.fusion_ops.fusion_ops_analyzer import TimelineFusionOpsAnalyzer from profiler.advisor.utils.utils import Timer -from profiler.advisor.result.result import OptimizeResult from profiler.advisor.analyzer.cluster.slow_rank_analyser import SlowRankAnalyzer from profiler.advisor.analyzer.cluster.slow_link_analyser import SlowLinkAnalyzer class Interface: - supported_analysiser = { - "computing": [], - "scheduling": [TimelineFusionOpsAnalyzer], + supported_analyzer = { + "schedule": [TimelineFusionOpsAnalyzer], + "computation": [], "communication": [], "overall": [], "dataloader": [], "cluster": [SlowRankAnalyzer, SlowLinkAnalyzer] } - all_dimension = supported_analysiser.keys() + all_dimension = supported_analyzer.keys() def __init__(self, **kwargs): self.collection_path = os.path.realpath(kwargs.get("profiling_path")) @staticmethod - def get_analyzer(dimension, is_inference=False): - return Interface.supported_analysiser.get(dimension, []) + def get_analyzer(dimension): + return Interface.supported_analyzer.get(dimension, []) def get_result(self: any, dimension: str, render_html=False, **kwargs): """ :Param mode: affinity apis, ai cpu and so on. 
""" result_list = [] - analysiser_list = self.get_analyzer(dimension, kwargs.get("is_inference", False)) - for idx, clss in enumerate(analysiser_list): + analyzer_list = self.get_analyzer(dimension) + for idx, clss in enumerate(analyzer_list): if clss and callable(clss): - analysiser = clss(collection_path = self.collection_path, **kwargs) - result_list.append(analysiser.optimize()) - if render_html and idx == len(analysiser_list) - 1: - if hasattr(analysiser, "html_render"): - analysiser.html_render.render_html() - analysiser.html_render.save_to_file(f'att_advisor_{Timer().strftime}.html') + analyzer = clss(collection_path = self.collection_path, **kwargs) + result_list.append(analyzer.optimize(**kwargs)) + if render_html and idx == len(analyzer_list) - 1: + if hasattr(analyzer, "html_render"): + analyzer.html_render.render_html() + analyzer.html_render.save_to_file(f'att_advisor_{Timer().strftime}.html') return result_list diff --git a/profiler/advisor/utils/utils.py b/profiler/advisor/utils/utils.py index d7837e1e40..1a4444f1ec 100644 --- a/profiler/advisor/utils/utils.py +++ b/profiler/advisor/utils/utils.py @@ -1,499 +1,549 @@ -import json -import logging -import multiprocessing as mp -import os -import queue -import re -import stat -import time -import traceback -import types -from functools import wraps -from typing import Any, Set - -import click -import requests -from requests.adapters import HTTPAdapter -from tqdm import tqdm - -from profiler.advisor.common import constant as const -from profiler.advisor.common.timeline.fusion_ops_db import FusionOperatorDB -from profiler.advisor.common.version_control import VersionControl -from profiler.advisor.utils.log import init_logger, get_log_level - -logger = logging.getLogger() -logger.setLevel(get_log_level()) -permission_warned: Set = set() - - -def ignore_warning(exception: Exception = None): - return exception - - -class ContextObject(object): - def __init__(self): - self._debug = False - - def set_debug(self, debug=False): - self._debug = debug - - @property - def debug_mode(self): - return self._debug - - -def debug_option(f): - return click.option('--debug', '-D', - is_flag=True, - expose_value=False, - is_eager=True, - callback=init_logger, - help="Debug Mode. Shows full stack trace when error occurs.")(f) - - -def singleton(cls): - """ - :param cls: any class - :return: singleton handle - """ - _instance = {} - - def _singleton(*args: any, **kw: any) -> any: - if cls not in _instance: - _instance[cls] = cls(*args, **kw) - return _instance.get(cls) - - return _singleton - - -def lazy_property(func): - """ - Lazy loading of class attributes. - which is calculated only once when it is called for the first time, - and will not be repeated for each call after that. 
- """ - attr_name = "_lazy_" + func.__name__ - - @property - def _lazy_property(instance): - if not hasattr(instance, attr_name): - setattr(instance, attr_name, func(instance)) - return getattr(instance, attr_name) - - return _lazy_property - - -class CheckPathAccess: - """ - check path access permissions - """ - - # pylint: disable=no-member - def __init__(self, func): - wraps(func)(self) - self.warned = permission_warned - - def __call__(self, *args, **kwargs): - path = args[0] - if not os.access(path, os.R_OK) and path not in self.warned: - logger.warning("%s can not read, check the permissions", path) - self.warned.add(path) - return self.__wrapped__(*args, **kwargs) - - def __get__(self, instance, cls): - if instance is None: - return self - return types.MethodType(self, instance) - - -def walk_error_handler(error): - """ - handle dir walk error - """ - if error.filename not in permission_warned: - logger.warning(error) - permission_warned.add(error.filename) - - -@CheckPathAccess -def get_file_path_from_directory(path: str, check_func: Any) -> list: - """ - get file from directory - """ - file_list = [] - for root, _, files in os.walk(path, onerror=walk_error_handler): - for filename in files: - filepath = os.path.join(root, filename) - if check_func(filename): - file_list.append(filepath) - return file_list - - -@singleton -class Timer: - def __init__(self): - self.strftime = time.strftime("%Y%m%d%H%M%S", time.localtime(time.time())) - - -def get_analyze_processes(): - # n_processes not exposed to user through ma-advisor command arguments now - return min(int(os.getenv(const.MA_ADVISOR_ANALYZE_PROCESSES, 1)), const.MA_ADVISOR_MAX_PROCESSES) - - -def init_timeline_ops_db(cann_version=None, torch_version=None): - logger.debug("init operators database") - - return FusionOperatorDB(cann_version=cann_version, torch_version=torch_version) - - -def format_timeline_result(result: dict, dump_html=False): - """ - :Param result: json for api name and stack - :Return: json after format - """ - format_result = {} - if dump_html: - result = json.loads(json.dumps(result).replace("\\r\\n", "
").replace("", "<module>")) - - for key, stacks in result.items(): - api_name = key.split(":")[0] - format_result[api_name] = sorted(list(stacks.items()), key=lambda stack: stack[1], reverse=True) - return format_result - - -class ParallelJob: - - def __init__(self, src_func, ops_api_list, job_name=None): - if not callable(src_func): - raise TypeError(f"src_func should be callable") - - if not isinstance(ops_api_list, (list, tuple)): - raise TypeError(f"ops_api_list should be list or tuple") - - self.src_func = src_func - self.ops_api_list = ops_api_list - self.job_name = job_name - - def start(self, n_proccesses): - - queue = mp.Queue(len(self.ops_api_list)) - completed_queue = mp.Queue() - for i in range(len(self.ops_api_list)): - queue.put(i) - - processes = [] - listen = mp.Process(target=self.listener, args=(completed_queue, len(self.ops_api_list),)) - listen.start() - - for i in range(n_proccesses): - p = mp.Process(target=self.parallel_queue, args=(queue, completed_queue,)) - processes.append(p) - p.start() - - for p in processes: - p.join() - - completed_queue.put(None) - listen.join() - - def listener(self, completed_queue, num): - pbar = tqdm(total=num, position=0, leave=False, ncols=100, desc=self.job_name) - for _ in iter(completed_queue.get, None): - pbar.update() - pbar.refresh() - pbar.n = num - - def parallel_queue(self, job_queue, completed_queue): - while True: - try: - if job_queue.empty(): - break - token = job_queue.get(timeout=1) - except queue.Empty: - continue - self.src_func(*self.ops_api_list[token]) - completed_queue.put(token) - - -def mp_queue_to_list(job_queue): - queue_list = [] - while True: - try: - if job_queue.empty(): - break - token = job_queue.get(timeout=1) - queue_list.append(token) - except queue.Empty: - continue - return queue_list - - -def load_parameter(parameter, default): - if not os.environ.get(parameter, None): - return default - else: - return os.environ.get(parameter) - - -def get_supported_subclass(clazz: VersionControl.__class__, cann_version: str): - """ - Returns a list of subclasses that support the specified version - :param clazz: Class name which is extends to VersionControl.__class__ - :param cann_version: The CANN software version - :return: The list of subclasses that support the specified CANN version - """ - # 获取所有支持这个cann版本的子类 - dataset_classes = clazz.__subclasses__() - sub_class_list = [cls for cls in dataset_classes if cls.is_supported(cann_version)] - logger.debug("The support subclass list is %s, cann version is %s", str(sub_class_list), cann_version) - return sub_class_list - - -def to_percent(num: float) -> str: - """ - change float to percent format - """ - num = num * 100 - return f"{num:.2f}%" - - -def safe_division(numerator, denominator): - """Return 0 if denominator is 0.""" - return denominator and numerator / denominator - - -def safe_write(content, save_path): - if os.path.dirname(save_path) != "": - os.makedirs(os.path.dirname(save_path), exist_ok=True) - - with os.fdopen(os.open(save_path, os.O_WRONLY | os.O_CREAT | os.O_TRUNC, - stat.S_IRUSR | stat.S_IWUSR | stat.S_IRGRP), "w") as f: - f.write(content) - - -def create_directory_for_file(file: str) -> None: - """ - create directory for file - """ - dirname = os.path.dirname(file) - if not os.path.exists(dirname): - os.makedirs(dirname) - - -class CheckPathAccess: - """ - check path access permissions - """ - - # pylint: disable=no-member - def __init__(self, func): - wraps(func)(self) - self.warned = permission_warned - - def __call__(self, *args, **kwargs): 
- path = args[0] - if path and not os.access(path, os.R_OK) and path not in self.warned: - logger.warning("%s can not read, check the permissions", path) - self.warned.add(path) - return self.__wrapped__(*args, **kwargs) - - def __get__(self, instance, cls): - if instance is None: - return self - return types.MethodType(self, instance) - - -@CheckPathAccess -def get_file_path_from_directory(path, check_func): - """ - get file from directory - """ - file_list = [] - - if not path: - return file_list - - if not os.path.isdir(path): - logger.warning("Expected existed directory, but got %s", path) - - for root, _, files in os.walk(path): - for filename in files: - filepath = os.path.join(root, filename) - if check_func(filename): - file_list.append(filepath) - return file_list - - -@CheckPathAccess -def get_dir_path_from_directory(path: str, check_func: Any) -> list: - """ - get file from directory - """ - file_list = [] - for root, _, files in os.walk(path, onerror=walk_error_handler): - for filename in files: - filepath = os.path.join(root, filename) - if check_func(filename): - file_list.append(filepath) - return file_list - - -def is_regex_pattern(string: str): - """ - Check if str is a regular expression. - """ - escaped_string = re.escape(string) - return not (escaped_string == string) - - -def join_prof_path(root_dir: str, sub_dir: str) -> str: - """ - regular expression matching method for path concatenation - """ - if is_regex_pattern(sub_dir): - for root, _, _ in os.walk(root_dir, onerror=walk_error_handler): - if re.match(sub_dir, os.path.basename(root)): - return root - else: - sub_dir = os.path.join(root_dir, sub_dir) - if os.path.exists(sub_dir): - return sub_dir - return "" - - -def format_excel_title(title: str) -> str: - """ - format excel title - """ - title = title.lower() - title = title.replace("(us)", '') - title = title.replace("(ns)", '') - title = title.replace("(%)", '') - title = title.replace(" ", "_") - return title - - -def format_float(num: float) -> float: - """ - format float num, round to 2 decimal places - """ - return round(num, 2) - - -class SafeOpen: - """ - safe open to check file - """ - - # pylint: disable=consider-using-with - def __init__(self, name, mode='r', encoding=None): - self.file = None - if not os.path.exists(name): - logger.warning("%s not exist, please check", name) - return - - if os.access(name, os.R_OK): - self.file = open(name, mode, encoding=encoding, errors="ignore") - else: - logger.warning("%s can not read, check the permissions", name) - - def __enter__(self): - return self.file - - def __exit__(self, exc_type, exc_val, exc_tb): - if self.file: - self.file.close() - return True - - -def save_downloaded_file(response, url_path, file_save_path): - """保存响应体中的文件 - - 参数: - response: 请求后获取的响应体 - url_path: url路径 - file_save_path: 保存路径 - 返回: - final_file_path: 文件保存绝对路径 - """ - # 获取url路径中的文件名, 拼接在保存路径下 - file_save_path = os.path.normpath(file_save_path) - file_name = os.path.basename(url_path) - final_file_path = os.path.join(file_save_path, file_name) - # 若目标保存路径不存在,则自动生成 - if not os.path.exists(file_save_path): - os.makedirs(file_save_path) - if response.status_code <= 300: - logger.debug("Response status code is %s", response.status_code) - flags = os.O_WRONLY | os.O_CREAT | os.O_EXCL - modes = stat.S_IWUSR | stat.S_IRUSR - # 若文件已存在,则移除已有的文件并保存最新的文件 - if os.path.exists(final_file_path): - os.remove(final_file_path) - # 保存文件 - with os.fdopen(os.open(final_file_path, flags, modes), mode="wb") as f: - f.write(response.content) - 
logger.info("Success to save content in: %s", os.path.abspath(final_file_path)) - else: - # 若响应码不为预期的数值, 显示相应告警 - logger.warning("Failed to save the response body. The response status code is %s. " - "Please check the network or file URL", response.status_code) - - -def request_with_retry(url_path): - """使用requests请求获取文件, 失败则进行重试, 最多请求 max_retries+1 次 - - 参数: - url_path: URL路径 - file_save_path: 云文件保存路径 - """ - logger.debug("Requesting or retrying to get %s", url_path) - - # 若从环境变量指定了保存路径,优先从环境变量中获取,若为空则使用默认的云文件保存路径constant.CLOUD_RULE_PATH - file_save_path = os.path.join(os.path.expanduser("~"), const.CLOUD_RULE_PATH) - if os.getenv(const.ADVISOR_RULE_PATH): - file_save_path = os.getenv(const.ADVISOR_RULE_PATH) - - session = requests.Session() - # 使用session发起的所有请求, 默认最多会重试 max_retries 次, 计入最初请求, 最差情况下请求 max_retries+1 次 - adapter = HTTPAdapter(max_retries=const.MAX_RETRIES) - session.mount('http://', adapter) - session.mount('https://', adapter) - - logger.debug('Session try to get response') - response = None - try: - response = session.get(url_path, timeout=const.TIMEOUT) - except Exception as e: - logger.debug("Error: %s: %s", e, traceback.format_exc()) - - if response is None: - logger.warning("Fail to download: %s, response is None, " - "please use the environment variable %s for more detailed information", - url_path, const.ADVISOR_LOG_LEVEL) - else: - try: - # 若响应码为400~600之间,response.raise_for_status抛出HTTPError错误, 跳过调用save_downloaded_file函数逻辑 - response.raise_for_status() - save_downloaded_file(response, url_path=url_path, file_save_path=file_save_path) - except Exception as e: - logger.warning("Error: %s: %s", e, traceback.format_exc()) - # 关闭 session, 清除所有装配器 - session.close() - - -def read_csv(file): - import csv - - raw_data = [] - logger.debug("Parse file %s", file) - with SafeOpen(file, encoding="utf-8") as csv_file: - try: - csv_content = csv.reader(csv_file) - for row in csv_content: - raw_data.append(row) - except OSError as error: - logger.error("Read csv file failed : %s", error) - return [] - - return raw_data +import inspect +import json +import logging +import multiprocessing as mp +import os +import queue +import re +import stat +import time +import traceback +import types +from functools import wraps +from typing import Any, Set + +import click +import requests +from requests.adapters import HTTPAdapter +from tqdm import tqdm + +from profiler.advisor.common import constant as const +from profiler.advisor.common.version_control import VersionControl +from profiler.advisor.utils.log import init_logger, get_log_level + +logger = logging.getLogger() +logger.setLevel(get_log_level()) +permission_warned: Set = set() + + +def ignore_warning(exception: Exception = None): + return exception + + +class ContextObject(object): + def __init__(self): + self._debug = False + + def set_debug(self, debug=False): + self._debug = debug + + @property + def debug_mode(self): + return self._debug + + +def debug_option(f): + return click.option('--debug', '-D', + is_flag=True, + expose_value=False, + is_eager=True, + callback=init_logger, + help="Debug Mode. 
Shows full stack trace when error occurs.")(f) + + +def get_class_absolute_path(cls): + module = inspect.getmodule(cls) + if module is not None: + module_path = module.__name__ + class_name = cls.__name__ + return f"{module_path}.{class_name}" + else: + return None + + +def is_static_func(function_obj): + return isinstance(function_obj, staticmethod) + + +def singleton(cls): + """ + :param cls: any class + :return: singleton handle + + When using the singleton function, you need to manually specify arg='dataSet_path'. Otherwise, the singleton function + is initialized by class name. + if cls has 'arg' property, _instance map will build by class_name and 'arg', the default value of + collection path is class absolute path. + + _instance = {cls.name: {collection_path: instance}} + """ + _instance = {} + + def _singleton(*args: any, **kw: any) -> any: + collection_path = kw.get("collection_path") + if not collection_path: + collection_path = get_class_absolute_path(cls) + if cls in _instance and collection_path in _instance[cls]: + return _instance[cls].get(collection_path) + if cls not in _instance: + _instance[cls] = {collection_path: cls(*args, **kw)} + else: + _instance[cls][collection_path] = cls(*args, **kw) + return _instance[cls].get(collection_path) + + # 保留原始类的属性和方法 + _singleton.__name__ = cls.__name__ + _singleton.__module__ = cls.__module__ + _singleton.__doc__ = cls.__doc__ + + # 拷贝原始类的类方法和静态方法 + _singleton.__dict__.update(cls.__dict__) + for base_class in inspect.getmro(cls)[::-1]: + # 获取类的所有成员 + members = inspect.getmembers(base_class) + + # 过滤出函数对象 + function_objs = [member[1] for member in members if inspect.isfunction(member[1]) or inspect.ismethod(member[1])] + for function_obj in function_objs: + if inspect.isfunction(function_obj) and not is_static_func(function_obj): + continue + setattr(_singleton, function_obj.__name__, function_obj) + + return _singleton + + +def lazy_property(func): + """ + Lazy loading of class attributes. + which is calculated only once when it is called for the first time, + and will not be repeated for each call after that. 
+ """ + attr_name = "_lazy_" + func.__name__ + + @property + def _lazy_property(instance): + if not hasattr(instance, attr_name): + setattr(instance, attr_name, func(instance)) + return getattr(instance, attr_name) + + return _lazy_property + + +class CheckPathAccess: + """ + check path access permissions + """ + + # pylint: disable=no-member + def __init__(self, func): + wraps(func)(self) + self.warned = permission_warned + + def __call__(self, *args, **kwargs): + path = args[0] + if not os.access(path, os.R_OK) and path not in self.warned: + logger.warning("%s can not read, check the permissions", path) + self.warned.add(path) + return self.__wrapped__(*args, **kwargs) + + def __get__(self, instance, cls): + if instance is None: + return self + return types.MethodType(self, instance) + + +def walk_error_handler(error): + """ + handle dir walk error + """ + if error.filename not in permission_warned: + logger.warning(error) + permission_warned.add(error.filename) + + +@CheckPathAccess +def get_file_path_from_directory(path: str, check_func: Any) -> list: + """ + get file from directory + """ + file_list = [] + for root, _, files in os.walk(path, onerror=walk_error_handler): + for filename in files: + filepath = os.path.join(root, filename) + if check_func(filename): + file_list.append(filepath) + return file_list + + +@singleton +class Timer: + def __init__(self): + self.strftime = time.strftime("%Y%m%d%H%M%S", time.localtime(time.time())) + + +def get_analyze_processes(): + # n_processes not exposed to user through att-advisor command arguments now + return min(int(os.getenv(const.MA_ADVISOR_ANALYZE_PROCESSES, 1)), const.MA_ADVISOR_MAX_PROCESSES) + + +def format_timeline_result(result: dict, dump_html=False): + """ + :Param result: json for api name and stack + :Return: json after format + """ + format_result = {} + if dump_html: + result = json.loads(json.dumps(result).replace("\\r\\n", "
").replace("", "<module>")) + + for key, stacks in result.items(): + api_name = key.split(":")[0] + format_result[api_name] = sorted(list(stacks.items()), key=lambda stack: stack[1], reverse=True) + return format_result + + +class ParallelJob: + + def __init__(self, src_func, ops_api_list, job_name=None): + if not callable(src_func): + raise TypeError(f"src_func should be callable") + + if not isinstance(ops_api_list, (list, tuple)): + raise TypeError(f"ops_api_list should be list or tuple") + + self.src_func = src_func + self.ops_api_list = ops_api_list + self.job_name = job_name + + def start(self, n_proccesses): + + job_queue = mp.Queue(len(self.ops_api_list)) + completed_queue = mp.Queue() + for i in range(len(self.ops_api_list)): + job_queue.put(i) + + processes = [] + listen = mp.Process(target=self.listener, args=(completed_queue, len(self.ops_api_list),)) + listen.start() + + for i in range(n_proccesses): + p = mp.Process(target=self.parallel_queue, args=(job_queue, completed_queue,)) + processes.append(p) + p.start() + + for p in processes: + p.join() + + completed_queue.put(None) + listen.join() + + def listener(self, completed_queue, num): + pbar = tqdm(total=num, position=0, leave=False, ncols=100, desc=self.job_name) + for _ in iter(completed_queue.get, None): + pbar.update() + pbar.refresh() + pbar.n = num + + def parallel_queue(self, job_queue, completed_queue): + while True: + try: + if job_queue.empty(): + break + token = job_queue.get(timeout=1) + except queue.Empty: + continue + self.src_func(*self.ops_api_list[token]) + completed_queue.put(token) + + +def mp_queue_to_list(job_queue): + queue_list = [] + while True: + try: + if job_queue.empty(): + break + token = job_queue.get(timeout=1) + queue_list.append(token) + except queue.Empty: + continue + return queue_list + + +def load_parameter(parameter, default): + if not os.environ.get(parameter, None): + return default + else: + return os.environ.get(parameter) + + +def get_supported_subclass(clazz: VersionControl.__class__, cann_version: str): + """ + Returns a list of subclasses that support the specified version + :param clazz: Class name which is extends to VersionControl.__class__ + :param cann_version: The CANN software version + :return: The list of subclasses that support the specified CANN version + """ + # 获取所有支持这个cann版本的子类 + dataset_classes = clazz.__subclasses__() + sub_class_list = [cls for cls in dataset_classes if cls.is_supported(cann_version)] + logger.debug("The support subclass list is %s, cann version is %s", str(sub_class_list), cann_version) + return sub_class_list + + +def to_percent(num: float) -> str: + """ + change float to percent format + """ + num = num * 100 + return f"{num:.2f}%" + + +def safe_division(numerator, denominator): + """Return 0 if denominator is 0.""" + return denominator and numerator / denominator + + +def safe_write(content, save_path): + if os.path.dirname(save_path) != "": + os.makedirs(os.path.dirname(save_path), exist_ok=True) + + with os.fdopen(os.open(save_path, os.O_WRONLY | os.O_CREAT | os.O_TRUNC, + stat.S_IRUSR | stat.S_IWUSR | stat.S_IRGRP), "w") as f: + f.write(content) + + +def create_directory_for_file(file: str) -> None: + """ + create directory for file + """ + dirname = os.path.dirname(file) + if not os.path.exists(dirname): + os.makedirs(dirname) + + +class CheckPathAccess: + """ + check path access permissions + """ + + # pylint: disable=no-member + def __init__(self, func): + wraps(func)(self) + self.warned = permission_warned + + def __call__(self, 
*args, **kwargs): + path = args[0] + if path and not os.access(path, os.R_OK) and path not in self.warned: + logger.warning("%s can not read, check the permissions", path) + self.warned.add(path) + return self.__wrapped__(*args, **kwargs) + + def __get__(self, instance, cls): + if instance is None: + return self + return types.MethodType(self, instance) + + +@CheckPathAccess +def get_file_path_from_directory(path, check_func): + """ + get file from directory + """ + file_list = [] + + if not path: + return file_list + + if not os.path.isdir(path): + logger.warning("Expected existed directory, but got %s", path) + + for root, _, files in os.walk(path): + for filename in files: + filepath = os.path.join(root, filename) + if check_func(filename): + file_list.append(filepath) + return file_list + + +@CheckPathAccess +def get_dir_path_from_directory(path: str, check_func: Any) -> list: + """ + get file from directory + """ + file_list = [] + for root, _, files in os.walk(path, onerror=walk_error_handler): + for filename in files: + filepath = os.path.join(root, filename) + if check_func(filename): + file_list.append(filepath) + return file_list + + +def is_regex_pattern(string: str): + """ + Check if str is a regular expression. + """ + escaped_string = re.escape(string) + return not (escaped_string == string) + + +def join_prof_path(root_dir: str, sub_dir: str) -> str: + """ + regular expression matching method for path concatenation + """ + if is_regex_pattern(sub_dir): + for root, _, _ in os.walk(root_dir, onerror=walk_error_handler): + if re.match(sub_dir, os.path.basename(root)): + return root + else: + sub_dir = os.path.join(root_dir, sub_dir) + if os.path.exists(sub_dir): + return sub_dir + return "" + + +def format_excel_title(title: str) -> str: + """ + format excel title + """ + title = title.lower() + title = title.replace("(us)", '') + title = title.replace("(ns)", '') + title = title.replace("(%)", '') + title = title.replace(" ", "_") + return title + + +def format_float(num: float) -> float: + """ + format float num, round to 2 decimal places + """ + return round(num, 2) + + +class SafeOpen: + """ + safe open to check file + """ + + # pylint: disable=consider-using-with + def __init__(self, name, mode='r', encoding=None): + self.file = None + if not os.path.exists(name): + logger.warning("%s not exist, please check", name) + return + + if os.access(name, os.R_OK): + self.file = open(name, mode, encoding=encoding, errors="ignore") + else: + logger.warning("%s can not read, check the permissions", name) + + def __enter__(self): + return self.file + + def __exit__(self, exc_type, exc_val, exc_tb): + if self.file: + self.file.close() + return True + + +def save_downloaded_file(response, url_path, file_save_path): + """保存响应体中的文件 + + 参数: + response: 请求后获取的响应体 + url_path: url路径 + file_save_path: 保存路径 + 返回: + final_file_path: 文件保存绝对路径 + """ + # 获取url路径中的文件名, 拼接在保存路径下 + file_save_path = os.path.normpath(file_save_path) + file_name = os.path.basename(url_path) + final_file_path = os.path.join(file_save_path, file_name) + # 若目标保存路径不存在,则自动生成 + if not os.path.exists(file_save_path): + os.makedirs(file_save_path) + if response.status_code <= 300: + logger.debug("Response status code is %s", response.status_code) + flags = os.O_WRONLY | os.O_CREAT | os.O_EXCL + modes = stat.S_IWUSR | stat.S_IRUSR + # 若文件已存在,则移除已有的文件并保存最新的文件 + if os.path.exists(final_file_path): + os.remove(final_file_path) + # 保存文件 + with os.fdopen(os.open(final_file_path, flags, modes), mode="wb") as f: + 
f.write(response.content) + logger.info("Success to save content in: %s", os.path.abspath(final_file_path)) + else: + # 若响应码不为预期的数值, 显示相应告警 + logger.warning("Failed to save the response body. The response status code is %s. " + "Please check the network or try another region", response.status_code) + + +def request_with_retry(url_path, region_name=None): + """使用requests请求获取文件, 失败则进行重试, 最多请求 max_retries+1 次 + + 参数: + url_path: URL路径 + file_save_path: 云文件保存路径 + """ + logger.debug("Requesting or retrying to get file from region: %s", region_name) + + # 若从环境变量指定了保存路径,优先从环境变量中获取,若为空则使用默认的云文件保存路径constant.CLOUD_RULE_PATH + file_save_path = os.path.join(os.path.expanduser("~"), const.CLOUD_RULE_PATH) + if os.getenv(const.ADVISOR_RULE_PATH): + file_save_path = os.getenv(const.ADVISOR_RULE_PATH) + + session = requests.Session() + # 使用session发起的所有请求, 默认最多会重试 max_retries 次, 计入最初请求, 最差情况下请求 max_retries+1 次 + adapter = HTTPAdapter(max_retries=const.MAX_RETRIES) + session.mount(const.HTTP_PREFIXES, adapter) + session.mount(const.HTTPS_PREFIXES, adapter) + + logger.debug('Session try to get response') + response = None + try: + response = session.get(url_path, timeout=const.TIMEOUT) + except Exception as e: + logger.debug("Error: %s: %s", e, traceback.format_exc()) + + if response is None: + logger.warning("Fail to download file from region: %s, response is None, " + "please use the environment variable %s for more detailed information", + region_name, const.ADVISOR_LOG_LEVEL) + else: + try: + # 若响应码为400~600之间,response.raise_for_status抛出HTTPError错误, 跳过调用save_downloaded_file函数逻辑 + response.raise_for_status() + save_downloaded_file(response, url_path=url_path, file_save_path=file_save_path) + except Exception as e: + logger.warning("Error: %s: %s", e, traceback.format_exc()) + # 关闭 session, 清除所有装配器 + session.close() + + +def read_csv(file): + import csv + + raw_data = [] + logger.debug("Parse file %s", file) + with SafeOpen(file, encoding="utf-8") as csv_file: + try: + csv_content = csv.reader(csv_file) + for row in csv_content: + raw_data.append(row) + except OSError as error: + logger.error("Read csv file failed : %s", error) + return [] + + return raw_data + + +def get_file_path_by_walk(root, filename): + file_path = "" + for root, _, files in os.walk(root, topdown=True): + for name in files: + if name == filename: + file_path = os.path.join(root, name) + return file_path + return file_path diff --git a/profiler/cli/analyze_cli.py b/profiler/cli/analyze_cli.py index 2efecffcb7..95d8fa2f53 100644 --- a/profiler/cli/analyze_cli.py +++ b/profiler/cli/analyze_cli.py @@ -19,8 +19,8 @@ def _analyze(dimensions, **kwargs): job_list = [] for dimension in dimensions: - interface = Interface(**kwargs) - job_list.append((dimension, interface)) + interface = Interface(**kwargs) + job_list.append((dimension, interface)) for i, (dimension, interface) in enumerate(job_list[::-1]): result_list += interface.get_result(dimension, render_html=i == len(job_list) - 1) @@ -41,8 +41,9 @@ def analyze_cli(**kwargs): name="all", short_help='Analyze timeline, operators and graph.') @click.option('--profiling_path', '-d', 'profiling_path', type=click.Path(), required=True, - help='path of trace_view.json in profiling') -@click.option('--benchmark_profiling_path', '-bp', 'benchmark_profiling_path', type=click.Path()) + help='Directory of profiling data') +@click.option('--benchmark_profiling_path', '-bp', 'benchmark_profiling_path', type=click.Path(), + help='Directory of benchmark profiling data, used for compare performance') 
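For context on how the options above are consumed: the snippet below is a minimal, illustrative sketch of driving the same analysis from Python instead of the CLI, using only the Interface API introduced earlier in this patch. It mirrors what _analyze does for a single dimension at this stage of the series; the function name, profiling path, and version strings are placeholders, not values taken from the repository.

# Illustrative sketch only: programmatic equivalent of one _analyze iteration.
# Path and version strings are placeholders.
from profiler.advisor.interface.interface import Interface

def run_schedule_analysis(profiling_path="/path/to/profiling_data"):
    interface = Interface(profiling_path=profiling_path)
    # get_result forwards extra kwargs (e.g. cann_version, torch_version) to the analyzers;
    # render_html=True also writes att_advisor_<timestamp>.html next to the report
    results = interface.get_result("schedule",
                                   render_html=True,
                                   cann_version="<your CANN version>",   # placeholder
                                   torch_version="<your torch version>")  # placeholder
    for result in results:
        if result and hasattr(result, "show"):
            result.show()
    return results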
@click.option('--cann_version', '-cv', 'cann_version', type=click.Choice(constant.SUPPORTED_CANN_VERSION, case_sensitive=False), default=constant.DEFAULT_CANN_VERSION, @@ -52,7 +53,14 @@ def analyze_cli(**kwargs): type=click.Choice(constant.SUPPORTED_TORCH_VERSION, case_sensitive=False), default=constant.DEFAULT_TORCH_VERSION, help='The runtime torch version, which can be detected by exec command "pip show torch"') -@click.option('--is_inference', is_flag=True) +# @click.option('--is_inference', is_flag=True, help="Enable performance analysis of inference task") +@click.option("-pt", + "--profiling_type", + metavar="", + default=constant.ASCEND_PYTORCH_PROFILER, + required=False, + type=click.Choice(constant.SUPPORTED_PROFILING_TYPE), + help="enter the profiling type, selectable range ascend_pytorch_profiler, mslite ,msprof") @debug_option def analyze_all(**kwargs) -> None: # 当前compare_tools必须输入两个profiling路径,att-advisor有等价功能支持输入一个Profiling路径,后续替换成对应实现 @@ -66,8 +74,7 @@ def analyze_all(**kwargs) -> None: name="communication", short_help='Analyze timeline, operators and graph.') @click.option('--profiling_path', '-d', 'profiling_path', type=click.Path(), required=True, - help='path of trace_view.json in profiling') -@click.option('--benchmark_profiling_path', '-bp', 'benchmark_profiling_path', type=click.Path()) + help='Directory of profiling data') @click.option('--cann_version', '-cv', 'cann_version', type=click.Choice(constant.SUPPORTED_CANN_VERSION, case_sensitive=False), default=constant.DEFAULT_CANN_VERSION, @@ -77,19 +84,16 @@ def analyze_all(**kwargs) -> None: type=click.Choice(constant.SUPPORTED_TORCH_VERSION, case_sensitive=False), default=constant.DEFAULT_TORCH_VERSION, help='The runtime torch version, which can be detected by exec command "pip show torch"') -@click.option('--mode', '-m', 'mode', default=None) -@click.option('--is_inference', is_flag=True) @debug_option def analyze_communication(**kwargs) -> None: _analyze(["communication"], **kwargs) @analyze_cli.command(context_settings=CONTEXT_SETTINGS, - name="scheduling", + name="schedule", short_help='Analyze timeline, operators and graph.') @click.option('--profiling_path', '-d', 'profiling_path', type=click.Path(), required=True, - help='path of trace_view.json in profiling') -@click.option('--benchmark_profiling_path', '-bp', 'benchmark_profiling_path', type=click.Path()) + help='Directory of profiling data') @click.option('--cann_version', '-cv', 'cann_version', type=click.Choice(constant.SUPPORTED_CANN_VERSION, case_sensitive=False), default=constant.DEFAULT_CANN_VERSION, @@ -99,19 +103,16 @@ def analyze_communication(**kwargs) -> None: type=click.Choice(constant.SUPPORTED_TORCH_VERSION, case_sensitive=False), default=constant.DEFAULT_TORCH_VERSION, help='The runtime torch version, which can be detected by exec command "pip show torch"') -@click.option('--mode', '-m', 'mode', default=None) -@click.option('--is_inference', is_flag=True) @debug_option -def analyze_scheduling(**kwargs) -> None: - _analyze(["scheduling"], **kwargs) +def analyze_schedule(**kwargs) -> None: + _analyze(["schedule"], **kwargs) @analyze_cli.command(context_settings=CONTEXT_SETTINGS, - name="computing", + name="computation", short_help='Analyze timeline, operators and graph.') @click.option('--profiling_path', '-d', 'profiling_path', type=click.Path(), required=True, - help='path of trace_view.json in profiling') -@click.option('--benchmark_profiling_path', '-bp', 'benchmark_profiling_path', type=click.Path()) + help='Directory of profiling 
data') @click.option('--cann_version', '-cv', 'cann_version', type=click.Choice(constant.SUPPORTED_CANN_VERSION, case_sensitive=False), default=constant.DEFAULT_CANN_VERSION, @@ -121,8 +122,33 @@ def analyze_scheduling(**kwargs) -> None: type=click.Choice(constant.SUPPORTED_TORCH_VERSION, case_sensitive=False), default=constant.DEFAULT_TORCH_VERSION, help='The runtime torch version, which can be detected by exec command "pip show torch"') -@click.option('--mode', '-m', 'mode', default=None) -@click.option('--is_inference', is_flag=True) +@click.option("-pt", + "--profiling_type", + metavar="", + default=constant.ASCEND_PYTORCH_PROFILER, + required=False, + type=click.Choice(constant.SUPPORTED_PROFILING_TYPE), + help="enter the profiling type, selectable range ascend_pytorch_profiler, mslite ,msprof") @debug_option -def analyze_computing(**kwargs) -> None: - _analyze(["computing"], **kwargs) +def analyze_computation(**kwargs) -> None: + _analyze(["computation"], **kwargs) + + +@analyze_cli.command(context_settings=CONTEXT_SETTINGS, + name="dataloader", + short_help='Analyze timeline, operators and graph.') +@click.option('--profiling_path', '-d', 'profiling_path', type=click.Path(), required=True, + help='Directory of profiling data') +@click.option('--cann_version', '-cv', 'cann_version', + type=click.Choice(constant.SUPPORTED_CANN_VERSION, case_sensitive=False), + default=constant.DEFAULT_CANN_VERSION, + help='The CANN software version, which can be viewed by executing the following command: ' + '"cat /usr/local/Ascend/ascend-toolkit/latest/aarch64-linux/ascend_toolkit_install.info"') +@click.option('--torch_version', '-tv', 'torch_version', + type=click.Choice(constant.SUPPORTED_TORCH_VERSION, case_sensitive=False), + default=constant.DEFAULT_TORCH_VERSION, + help='The runtime torch version, which can be detected by exec command "pip show torch"') +@click.option('--is_inference', is_flag=True, help="Enable performance analysis of inference task") +@debug_option +def analyze_dataloader(**kwargs) -> None: + _analyze(["dataloader"], **kwargs) diff --git a/profiler/cli/entrance.py b/profiler/cli/entrance.py index b14d3dfd86..d9b5b10da7 100644 --- a/profiler/cli/entrance.py +++ b/profiler/cli/entrance.py @@ -53,7 +53,7 @@ advisor_cli.add_command(compare_cli, name="compare") if __name__ == '__main__': advisor_cli.main( - ["analyze", "scheduling", "-d", + ["analyze", "schedule", "-d", r"/home/ma-user/work/profiling", ] ) diff --git a/profiler/cli/update_cli.py b/profiler/cli/update_cli.py new file mode 100644 index 0000000000..9407981ae0 --- /dev/null +++ b/profiler/cli/update_cli.py @@ -0,0 +1,40 @@ +from urllib import parse + +import click + +from profiler.advisor.common import constant +from profiler.advisor.config.config import Config +from profiler.advisor.utils.tools import CONTEXT_SETTINGS, ClickAliasedGroup +from profiler.advisor.utils.utils import debug_option, request_with_retry + + +@click.group(name="update", cls=ClickAliasedGroup) +def update_cli(**kwargs): + """Update operation command, such as update rule and specify save path.""" + pass + + +@update_cli.command(context_settings=CONTEXT_SETTINGS, + name="rule", + short_help='Update the ma-advisor rules on the terminal. The default save path is ' + '"~/rules/cloud/". 
If user want to specify the save path, please use the environment ' + 'variable "ADVISOR_RULE_PATH"') +@click.option('--region', '-r', type=click.Choice(constant.CLOUD_RULE_REGION_LIST), required=True, + default=constant.DEFAULT_CLOUD_RULE_REGION, + help='Specifies the region where the rule file is downloaded.') +@debug_option +def update_rule(**kwargs) -> None: + """ + Download the latest rule yaml file. + """ + region_name = kwargs.get("region") + rule_bucket = Config().config.get(constant.RULE_BUCKET, region_name) + rule_endpoint_suffix = constant.COMMON_ENDPOINT_SUFFIX.format(region_name) + if region_name in constant.INNER_REGION_LIST: + rule_endpoint_suffix = constant.INNER_ENDPOINT_SUFFIX.format(region_name) + + obs_url = constant.HTTPS_PREFIXES + rule_bucket + "." + rule_endpoint_suffix + obs_url = parse.urljoin(obs_url, constant.COMMON_YAML_DIR) + for file_name in constant.CLOUD_YAML_NAME_LIST: + url = parse.urljoin(obs_url, file_name) + request_with_retry(url, region_name) diff --git a/requirements/test.txt b/requirements/tests.txt similarity index 95% rename from requirements/test.txt rename to requirements/tests.txt index 3bacb7ca55..bab89704aa 100644 --- a/requirements/test.txt +++ b/requirements/tests.txt @@ -1,5 +1,5 @@ -pytest==6.2.4 -pytest-cov==2.12.0 -pytest-mock==3.6.1 -pytest-cookies==0.6.1 +pytest==6.2.4 +pytest-cov==2.12.0 +pytest-mock==3.6.1 +pytest-cookies==0.6.1 mock==4.0.3 \ No newline at end of file diff --git a/version.txt b/version.txt index 7bcd0e3612..9f8e9b69a3 100644 --- a/version.txt +++ b/version.txt @@ -1 +1 @@ -0.0.2 \ No newline at end of file +1.0 \ No newline at end of file -- Gitee From c7bb1aa18ec9eb77c3018f273692950f2f9ba4e2 Mon Sep 17 00:00:00 2001 From: wuyuhan Date: Tue, 14 May 2024 15:13:06 +0800 Subject: [PATCH 10/21] =?UTF-8?q?advisor=20interface=E6=94=AF=E6=8C=81?= =?UTF-8?q?=E5=9B=9B=E7=BA=A7=E5=91=BD=E4=BB=A4=E5=8F=82=E6=95=B0=EF=BC=8C?= =?UTF-8?q?analyzer=5Fcli=E5=85=A5=E5=8F=A3=E5=A4=84=E8=AF=86=E5=88=AB?= =?UTF-8?q?=E6=98=AF=E5=90=A6=E6=98=AF=E9=9B=86=E7=BE=A4=E5=9C=BA=E6=99=AF?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .../fusion_ops/fusion_ops_analyzer.py | 9 +-- profiler/advisor/common/analyzer_scopes.py | 7 +++ profiler/advisor/interface/interface.py | 57 ++++++++++++------- profiler/advisor/result/result.py | 4 ++ profiler/cli/analyze_cli.py | 25 ++++++-- 5 files changed, 72 insertions(+), 30 deletions(-) create mode 100644 profiler/advisor/common/analyzer_scopes.py diff --git a/profiler/advisor/analyzer/schedule/fusion_ops/fusion_ops_analyzer.py b/profiler/advisor/analyzer/schedule/fusion_ops/fusion_ops_analyzer.py index 4259db093b..01613dbe32 100644 --- a/profiler/advisor/analyzer/schedule/fusion_ops/fusion_ops_analyzer.py +++ b/profiler/advisor/analyzer/schedule/fusion_ops/fusion_ops_analyzer.py @@ -6,6 +6,7 @@ from tqdm import tqdm from profiler.advisor.analyzer.base_analyzer import BaseAnalyzer from profiler.advisor.common import constant as const +from profiler.advisor.common.analyzer_scopes import SupportedScopes from profiler.advisor.common.timeline.event import TimelineEvent from profiler.advisor.dataset.timeline_event_dataset import TimelineEventDataset from profiler.advisor.result.item import OptimizeItem, OptimizeRecord @@ -155,7 +156,7 @@ class TimelineFusionOpsAnalyzer(BaseAnalyzer): ) optimization_item = OptimizeItem( - const.AFFINITY_TRAINING_API, + SupportedScopes.TIMELINE_FUSION_OPS, desc, [suggestion] ) @@ -163,16 +164,16 @@ class 
TimelineFusionOpsAnalyzer(BaseAnalyzer):
         self.result.add(OptimizeRecord(optimization_item))
         record_title = ["Affinity API", "Code stacks", "Stack called counts"]
-        self.result.add_detail(const.AFFINITY_TRAINING_API, headers=record_title)
+        self.result.add_detail(SupportedScopes.TIMELINE_FUSION_OPS, headers=record_title)
         for api_name, stacks_info in format_timeline_result(self.matched_op_stacks).items():
             if not stacks_info:
                 detail = [api_name, "null", "null"]
-                self.result.add_detail(const.AFFINITY_TRAINING_API, detail=detail)
+                self.result.add_detail(SupportedScopes.TIMELINE_FUSION_OPS, detail=detail)
             else:
                 for stack in stacks_info:
                     detail = [api_name, *stack]
-                    self.result.add_detail(const.AFFINITY_TRAINING_API, detail=detail)
+                    self.result.add_detail(SupportedScopes.TIMELINE_FUSION_OPS, detail=detail)
 
     def make_render(self):
         format_result_for_html = format_timeline_result(dict(self.matched_op_stacks), dump_html=True)
diff --git a/profiler/advisor/common/analyzer_scopes.py b/profiler/advisor/common/analyzer_scopes.py
new file mode 100644
index 0000000000..9a83adeb6d
--- /dev/null
+++ b/profiler/advisor/common/analyzer_scopes.py
@@ -0,0 +1,7 @@
+class SupportedScopes:
+
+    # used to specify fourth-level commands and to define the keys of the result dict
+    # each key defined below must match its value
+    TIMELINE_FUSION_OPS = "timeline_fusion_ops"
+    SLOW_RANK = "slow_rank"
+    SLOW_LINK = "slow_link"
diff --git a/profiler/advisor/interface/interface.py b/profiler/advisor/interface/interface.py
index 156922f4d1..c920ad4ea2 100644
--- a/profiler/advisor/interface/interface.py
+++ b/profiler/advisor/interface/interface.py
@@ -1,6 +1,8 @@
+from collections import OrderedDict
 import os
 from profiler.advisor.analyzer.schedule.fusion_ops.fusion_ops_analyzer import TimelineFusionOpsAnalyzer
+from profiler.advisor.common.analyzer_scopes import SupportedScopes
 from profiler.advisor.utils.utils import Timer
 from profiler.advisor.analyzer.cluster.slow_rank_analyser import SlowRankAnalyzer
 from profiler.advisor.analyzer.cluster.slow_link_analyser import SlowLinkAnalyzer
@@ -8,38 +10,51 @@ from profiler.advisor.analyzer.cluster.slow_link_analyser import SlowLinkAnalyze
 class Interface:
     supported_analyzer = {
-        "schedule": [TimelineFusionOpsAnalyzer],
-        "computation": [],
-        "communication": [],
-        "overall": [],
-        "dataloader": [],
-        "cluster": [SlowRankAnalyzer, SlowLinkAnalyzer]
+        "schedule": OrderedDict({
+            SupportedScopes.TIMELINE_FUSION_OPS: TimelineFusionOpsAnalyzer
+        }),
+        "computation": OrderedDict(),
+        "communication": OrderedDict(),
+        "overall": OrderedDict(),
+        "dataloader": OrderedDict(),
+        "cluster": OrderedDict({
+            SupportedScopes.SLOW_RANK: SlowRankAnalyzer,
+            SupportedScopes.SLOW_LINK: SlowLinkAnalyzer
+        })
     }
-    all_dimension = supported_analyzer.keys()
+    all_dimension = list(supported_analyzer.keys())
     def __init__(self, **kwargs):
         self.collection_path = os.path.realpath(kwargs.get("profiling_path"))
     @staticmethod
-    def get_analyzer(dimension):
-        return Interface.supported_analyzer.get(dimension, [])
+    def get_scope(dimension):
+        return list(Interface.supported_analyzer.get(dimension).keys())
-    def get_result(self: any, dimension: str, render_html=False, **kwargs):
+    @staticmethod
+    def get_analyzer(dimension, scope):
+        return Interface.supported_analyzer.get(dimension).get(scope)
+
+    def get_result(self: any, dimension: str, scope: str, render_html=False, output_dict=True, **kwargs):
         """
         :Param mode: affinity apis, ai cpu and so on. 
""" - result_list = [] - analyzer_list = self.get_analyzer(dimension) - for idx, clss in enumerate(analyzer_list): - if clss and callable(clss): - analyzer = clss(collection_path = self.collection_path, **kwargs) - result_list.append(analyzer.optimize(**kwargs)) - if render_html and idx == len(analyzer_list) - 1: - if hasattr(analyzer, "html_render"): - analyzer.html_render.render_html() - analyzer.html_render.save_to_file(f'att_advisor_{Timer().strftime}.html') - return result_list + if dimension not in self.all_dimension: + raise ValueError(f"Error dimension {dimension}, supported dimensions are {self.all_dimension}") + + supported_scopes = self.get_scope(dimension) + if scope not in supported_scopes: + raise ValueError(f"Error scope {scope}, supported scopes are {supported_scopes}") + + analyzer = self.get_analyzer(dimension, scope)(collection_path=self.collection_path, **kwargs) + result = analyzer.optimize(**kwargs) + + if render_html: + if hasattr(analyzer, "html_render"): + analyzer.html_render.render_html() + analyzer.html_render.save_to_file(f'att_advisor_{Timer().strftime}.html') + return result if not output_dict else result.data.get(getattr(SupportedScopes, scope.upper())) if __name__ == "__main__": diff --git a/profiler/advisor/result/result.py b/profiler/advisor/result/result.py index 308db61231..30b8f5795c 100644 --- a/profiler/advisor/result/result.py +++ b/profiler/advisor/result/result.py @@ -102,6 +102,10 @@ class OptimizeResult: self.page_dict = False self._tune_op_list = [] + @property + def data(self): + return self.sheet_recorder.sheet_data + def add_tune_op_list(self, tune_op_list) -> None: """ add tune op name to tune op list diff --git a/profiler/cli/analyze_cli.py b/profiler/cli/analyze_cli.py index 95d8fa2f53..86af711564 100644 --- a/profiler/cli/analyze_cli.py +++ b/profiler/cli/analyze_cli.py @@ -10,6 +10,7 @@ from profiler.advisor.utils.tools import CONTEXT_SETTINGS, ClickAliasedGroup from profiler.advisor.common import constant from profiler.advisor.utils.utils import debug_option from profiler.advisor.interface.interface import Interface +from profiler.cluster_analyse.cluster_data_preprocess.pytorch_data_preprocessor import PytorchDataPreprocessor logger = logging.getLogger() @@ -18,12 +19,26 @@ def _analyze(dimensions, **kwargs): result_list = [] job_list = [] - for dimension in dimensions: - interface = Interface(**kwargs) - job_list.append((dimension, interface)) + def is_cluster(): + profiling_path = kwargs.get("profiling_path") + path_list = [os.path.join(profiling_path, dir_name) for dir_name in os.listdir(profiling_path)] + dir_list = [path for path in path_list if os.path.isdir(path)] + data_processor = PytorchDataPreprocessor(dir_list) + data_map = data_processor.get_data_map() + return len(data_map) > 1 + + is_cluster = is_cluster() - for i, (dimension, interface) in enumerate(job_list[::-1]): - result_list += interface.get_result(dimension, render_html=i == len(job_list) - 1) + for dimension in dimensions: + if not is_cluster and dimension == "cluster": + continue + for scope in Interface.get_scope(dimension): + interface = Interface(**kwargs) + job_list.append((dimension, scope, interface)) + + for i, (dimension, scope, interface) in enumerate(job_list[::-1]): + result_list.append( + interface.get_result(dimension, scope, render_html=i == len(job_list) - 1, output_dict=False)) for result in result_list[::-1]: if result and hasattr(result, "show"): -- Gitee From e8b54caada474a536371f9a7be72c924bcb1947b Mon Sep 17 00:00:00 2001 From: personalc 
Date: Wed, 15 May 2024 16:49:56 +0800 Subject: [PATCH 11/21] support profiling operator analysis --- .gitignore | 1 + MANIFEST.in | 4 +- profiler/advisor/analyzer/base_analyzer.py | 8 +- .../computation/aicpu/aicpu_checker.py | 278 +++++++++++++++++ .../computation/bound/block_dim_checker.py | 77 +++++ .../bound/operator_bound_checker.py | 56 ++++ .../op_compile/dynamic_shape_checker.py | 82 +++++ .../analyzer/computation/operator_checker.py | 282 ++++++++++++++++++ .../computation/profiling_analyzer.py | 71 +++++ .../fusion_ops/timeline_api_stack_checker.py | 163 ++++++++++ profiler/advisor/common/analyzer_scopes.py | 1 + profiler/advisor/common/profiling/__init__.py | 0 profiler/advisor/common/profiling/ge_info.py | 47 +++ profiler/advisor/common/profiling/msprof.py | 144 +++++++++ .../advisor/common/profiling/op_summary.py | 76 +++++ profiler/advisor/common/profiling/tasktime.py | 75 +++++ .../config/profiling_data_version_config.yaml | 80 +++++ .../dataset/cluster/cluster_dataset.py | 6 +- .../advisor/dataset/profiling/__init__.py | 0 .../advisor/dataset/profiling/builder_base.py | 39 +++ .../advisor/dataset/profiling/db_manager.py | 70 +++++ .../advisor/dataset/profiling/device_info.py | 61 ++++ .../dataset/profiling/info_collection.py | 270 +++++++++++++++++ .../dataset/profiling/profiling_dataset.py | 76 +++++ .../dataset/profiling/profiling_parser.py | 132 ++++++++ .../advisor/display/html/templates/main.html | 8 +- .../html/templates/operator_ai_cpu.html | 61 ++++ .../html/templates/operator_block_dim.html | 38 +++ .../templates/operator_dynamic_shape.html | 15 + .../html/templates/operator_no_bound.html | 38 +++ profiler/advisor/interface/interface.py | 7 +- profiler/advisor/rules/aicpu_rules.yaml | 107 +++++++ profiler/advisor/utils/utils.py | 7 +- profiler/cli/entrance.py | 4 +- profiler/test/tools/__init__.py | 0 profiler/test/tools/tool.py | 38 +++ .../test/ut/advisor/profiling/__init__.py | 0 .../profiling/test_profiling_analyzer.py | 42 +++ .../profiling/test_profiling_dataset.py | 46 +++ profiler/test/ut/advisor/test_utils.py | 49 +++ setup.py | 6 +- 41 files changed, 2544 insertions(+), 21 deletions(-) create mode 100644 profiler/advisor/analyzer/computation/aicpu/aicpu_checker.py create mode 100644 profiler/advisor/analyzer/computation/bound/block_dim_checker.py create mode 100644 profiler/advisor/analyzer/computation/bound/operator_bound_checker.py create mode 100644 profiler/advisor/analyzer/computation/op_compile/dynamic_shape_checker.py create mode 100644 profiler/advisor/analyzer/computation/operator_checker.py create mode 100644 profiler/advisor/analyzer/computation/profiling_analyzer.py create mode 100644 profiler/advisor/analyzer/schedule/fusion_ops/timeline_api_stack_checker.py create mode 100644 profiler/advisor/common/profiling/__init__.py create mode 100644 profiler/advisor/common/profiling/ge_info.py create mode 100644 profiler/advisor/common/profiling/msprof.py create mode 100644 profiler/advisor/common/profiling/op_summary.py create mode 100644 profiler/advisor/common/profiling/tasktime.py create mode 100644 profiler/advisor/config/profiling_data_version_config.yaml create mode 100644 profiler/advisor/dataset/profiling/__init__.py create mode 100644 profiler/advisor/dataset/profiling/builder_base.py create mode 100644 profiler/advisor/dataset/profiling/db_manager.py create mode 100644 profiler/advisor/dataset/profiling/device_info.py create mode 100644 profiler/advisor/dataset/profiling/info_collection.py create mode 100644 
profiler/advisor/dataset/profiling/profiling_dataset.py create mode 100644 profiler/advisor/dataset/profiling/profiling_parser.py create mode 100644 profiler/advisor/display/html/templates/operator_ai_cpu.html create mode 100644 profiler/advisor/display/html/templates/operator_block_dim.html create mode 100644 profiler/advisor/display/html/templates/operator_dynamic_shape.html create mode 100644 profiler/advisor/display/html/templates/operator_no_bound.html create mode 100644 profiler/advisor/rules/aicpu_rules.yaml create mode 100644 profiler/test/tools/__init__.py create mode 100644 profiler/test/tools/tool.py create mode 100644 profiler/test/ut/advisor/profiling/__init__.py create mode 100644 profiler/test/ut/advisor/profiling/test_profiling_analyzer.py create mode 100644 profiler/test/ut/advisor/profiling/test_profiling_dataset.py create mode 100644 profiler/test/ut/advisor/test_utils.py diff --git a/.gitignore b/.gitignore index 36aacc7241..7e605b88a3 100644 --- a/.gitignore +++ b/.gitignore @@ -25,6 +25,7 @@ share/python-wheels/ .installed.cfg *.egg MANIFEST +.vscode # PyInstaller # Usually these files are written by a python script from a template diff --git a/MANIFEST.in b/MANIFEST.in index d86534656d..cfadbde1db 100644 --- a/MANIFEST.in +++ b/MANIFEST.in @@ -1,5 +1,3 @@ -recursive-include profiler/advisor/display * -recursive-include profiler/advisor/third_party/simulation/display * -recursive-include profiler/advisor/checker * +recursive-include profiler/ * global-exclude */__pycache__/* global-exclude *.pyc diff --git a/profiler/advisor/analyzer/base_analyzer.py b/profiler/advisor/analyzer/base_analyzer.py index 6f9438ba75..160f05c464 100644 --- a/profiler/advisor/analyzer/base_analyzer.py +++ b/profiler/advisor/analyzer/base_analyzer.py @@ -50,7 +50,7 @@ class BaseAnalyzer(VersionControl, metaclass=ABCMeta): return None logger.info("Enable analysis %s with %s", self.__class__.__name__, ",".join(data_list)) - return func(self, data) + return func(self) return wrapper @@ -76,7 +76,11 @@ class BaseAnalyzer(VersionControl, metaclass=ABCMeta): for dataset_cls in dataset_cls_list: if dataset_cls and callable(dataset_cls): - dataset_cls(collection_path=self.collection_path, data=self.dataset_list, **self.kwargs) + dataset = dataset_cls(collection_path=self.collection_path, data=self.dataset_list, **self.kwargs) + key = dataset_cls.get_key() + if key not in self.dataset_list: + self.dataset_list[key] = [] + self.dataset_list[key].append(dataset) @staticmethod def get_first_data_by_key(data, key) -> Union[Dataset, None]: diff --git a/profiler/advisor/analyzer/computation/aicpu/aicpu_checker.py b/profiler/advisor/analyzer/computation/aicpu/aicpu_checker.py new file mode 100644 index 0000000000..4654d97225 --- /dev/null +++ b/profiler/advisor/analyzer/computation/aicpu/aicpu_checker.py @@ -0,0 +1,278 @@ +import copy +import os +from functools import partial +from typing import List, Dict, Optional + +import yaml +from profiler.advisor.analyzer.computation.operator_checker import OperatorChecker, logger +from profiler.advisor.analyzer.schedule.fusion_ops.timeline_api_stack_checker import OpStackFinder +from profiler.advisor.common import constant +from profiler.advisor.dataset.dataset import Dataset +from profiler.advisor.dataset.profiling.profiling_dataset import ProfilingDataset +from profiler.advisor.dataset.timeline_event_dataset import TimelineEventDataset + + +class AicpuChecker(OperatorChecker): + _CHECKER = "aicpu operator" + _PROBLEM = "AICPU operator" + _MIN_TASK_DURATION = 20 + 
+    _description = f"Some operators' task durations exceed {_MIN_TASK_DURATION} us, such as:\n"
+    _SUGGESTION: List[str] = ["Modify code to avoid aicpu operator"]
+    STACK_INFO_ITEMS = "stack_info"
+    SUGGESTION_INFO_ITEMS = "suggestions"
+    _ITEMS = [
+        "op_name", "op_type", "task_duration", "input_shapes", "input_data_types", "input_formats", "output_shapes",
+        "output_data_types", "output_formats"
+    ]
+
+    def __init__(self, cann_version):
+        super(AicpuChecker, self).__init__(cann_version=cann_version)
+        self.aicpu_rules: Dict = {}
+        self.aicpu_checker: Dict = {}
+        self.load_aicpu_rules()
+
+    def _check_data(self, profiling_data: ProfilingDataset) -> bool:
+        if not self._check_summary(profiling_data):
+            return False
+        return True
+
+    def _check_operator(self, op_info) -> bool:
+        return op_info.task_type == constant.AI_CPU
+
+    def load_aicpu_rules(self, rule_path="rules/aicpu_rules.yaml") -> Dict:
+        if not os.path.isabs(rule_path):
+            rule_path = os.path.join(os.path.dirname(__file__),
+                                     "../../computation/", "../", rule_path)
+
+        if not os.path.exists(rule_path):
+            logger.warning("Skip analyzing aicpu issues, because %s does not exist.", rule_path)
+            return {}
+        with open(rule_path, 'r') as f:
+            self.aicpu_rules = yaml.safe_load(f)
+        self.filter_aicpu_rules(self.aicpu_rules)
+        for checker_name, check_rule in self.aicpu_rules.items():
+            if not isinstance(check_rule, (list, dict,)):
+                continue
+
+            if checker_name not in AICPU_CHECKER.keys():
+                logger.warning("Skip %s, which is not supported now.", checker_name)
+                continue
+
+            self.aicpu_checker[checker_name] = AICPU_CHECKER[checker_name](check_rule)
+
+    def filter_aicpu_rules(self, aicpu_rules):
+        support_checkers = []
+        for checkers in aicpu_rules['CommonChecker']:
+            for key, value in checkers.items():
+                if key == 'DataTypeChecker' and value['cann_version'] != self.cann_verson:
+                    continue
+                support_checkers.append(checkers)
+        aicpu_rules['CommonChecker'] = support_checkers
+        return
+
+    def check_aicpu_attr(self, op_info) -> List[str]:
+        suggestions = []
+        for _, checker in self.aicpu_checker.items():
+            suggestions.extend(checker.check(op_info))
+        return suggestions
+
+    def check(self, profiling_data: ProfilingDataset) -> bool:
+        """
+        check if any operator needs to be optimized
+        :param profiling_data: profiling dataset
+        :return: true or false
+        """
+
+        if not self._check_data(profiling_data):
+            return False
+        op_summary = profiling_data.op_summary
+
+        def get_operator_stack_info(api_stack_finder: OpStackFinder, op_name_list: list) -> list:
+            data: Dict[str, Dataset] = {}
+            event_dataset = TimelineEventDataset(collection_path=profiling_data.collection_path, data=data, task_type=constant.AI_CPU)
+
+            # disable multiprocessing to avoid the overhead of starting new processes for a light task
+            api_stack_finder.get_api_stack_by_op(event_dataset, op_name_list, constant.AI_CPU,
+                                                 disable_multiprocess=True)
+            return api_stack_finder._stack_record
+
+        self._op_list = []
+        total_task_duration = 0.0
+        max_task_duration = 0.0
+        for op_info in op_summary.op_list:
+            if self._check_operator(op_info):
+                self._op_list.append(op_info)
+
+                task_duration = float(op_info.task_duration)
+                total_task_duration += task_duration
+                max_task_duration = max(max_task_duration, task_duration)
+        if (not self._op_list) or (max_task_duration < self._MIN_TASK_DURATION):
+            return False
+
+        # collect stack info for all aicpu operators
+        op_name_list = []
+        for op in self._op_list:
+            if op.op_name not in op_name_list:
+                op_name_list.append(op.op_name)
+        api_stack_finder = OpStackFinder()
+        stack_record = get_operator_stack_info(api_stack_finder, op_name_list)
+
+        # map task_id to stack info
+        self._op_list.sort(key=lambda x: int(x.task_id))
+        stack_record.sort(key=lambda x: x[0])
+        task_id_to_stack = dict()
+        for stack in stack_record:
+            task_id_to_stack[stack[0]] = stack[-1]
+
+        # attach the stack attribute and rule-based suggestions to each operator
+        for op in self._op_list:
+            stack = task_id_to_stack.get(int(op.task_id))
+            op.add_attr(self.STACK_INFO_ITEMS, stack)
+            suggestions = self.check_aicpu_attr(op)
+            op.add_attr(self.SUGGESTION_INFO_ITEMS, suggestions)
+
+        # check for DOUBLE-type aicpu operators
+        double_type_ai_cpu_operator = []
+        for op in self._op_list:
+            if not op.has_attr("input_data_types"):
+                logger.warning(
+                    "Skip checking of input data in AICPU checker because op summary does not contain input_data_types")
+                break
+            if op.has_attr(
+                    "input_data_types") and "DOUBLE" in op.input_data_types and op.op_name not in double_type_ai_cpu_operator:
+                double_type_ai_cpu_operator.append(op.op_name)
+        if bool(double_type_ai_cpu_operator):
+            self._SUGGESTION.append("Try to convert double type operator to float, such as {}".format(
+                ",".join(double_type_ai_cpu_operator)))
+        return True
+
+    def make_render(self, html_render, record):
+        html_render.render_template(key="operator",
+                                    template_dir="templates",
+                                    template_name="operator_ai_cpu.html",
+                                    format_result=self.format_operator_result(record, constant.OPERATOR_LIST_UNLIMIT))
+
+    def format_operator_result(self, record, limit):
+        """
+        Format operator result to html
+        :param record: profiling check record
+        :param limit: Limit number of operator statistics lists.
+        :return:
+        """
+        optimization_item = record.optimization_item
+        release_suggestion_list = []
+        for suggestion in optimization_item.suggestion:
+            release_suggestion_list.append(suggestion.replace('\n', '
')) + logger.debug("suggestion list is %s", release_suggestion_list) + format_result = {"record": record.__dict__, "suggestion": '
'.join(release_suggestion_list), + "task_duration": round(record.statistics_item.task_duration, 2)} + + statistic = self.group_by(copy.deepcopy(self._op_list), op_key='op_type', + limit=limit) + format_result["statistic"] = statistic + stack_key_list = ["stack_info", "input_data_types", "output_data_types"] + if statistic: + for key, info in statistic: + op_info_list = self.group_by_list(info.get("op_info_list"), stack_key_list, limit) + info["op_info_list"] = op_info_list + return format_result + + def group_by_list(self, op_list, op_key_list: List = ["stack_info", "input_data_types", "output_data_types"], + limit: int = constant.OPERATOR_LIST_UNLIMIT): + if op_list is None: + op_list = [] + + # op_key_list 合并添加合并的属性,作为 groupby 的 key value + op_key = '+'.join(op_key_list) # str, json + for op_info in op_list: + attribute = "" + for _op in op_key_list: + if op_info.get_attr(_op): + attribute += op_info.get_attr(_op) + op_info.add_attr(op_key, attribute) + + return self.group_by(op_list, op_key=op_key, limit=limit) + + +class BaserChecker: + def __init__(self, *args, **kwargs): + self.checker_list = [] + + def build(self): + raise NotImplementedError + + def check(self, op_info) -> List[str]: + suggestions = [] + for checker in self.checker_list: + suggestion = checker(op_info) + if suggestion is not None: + suggestions.append(suggestion) + return suggestions + + +class CommonChecker(BaserChecker): + def __init__(self, check_rules: List[Dict] = None): + super(CommonChecker, self).__init__() + self.check_rules = check_rules if check_rules is not None else [] + self.supported_checker = dict(DataTypeChecker=self.datatype_checker) + self.build() + + @staticmethod + def datatype_checker(check_item: Dict, op_info) -> Optional[str]: + supported_op_type = check_item.get('op_type', []) + suggestion = check_item.get('suggestion', "") + valid_inputs = check_item.get('input', []) + valid_outputs = check_item.get('output', []) + ignore_type = check_item.get('ignore_type', []) + op_type = getattr(op_info, 'op_type', "UNKNOWN") + if "__ALL__" in supported_op_type or \ + op_type.lower() in supported_op_type: + if op_type.lower() in ignore_type: + return None + + op_input_dtype = getattr(op_info, 'input_data_types', "").split(";") + op_input_dtype = [item.lower() for item in op_input_dtype] + op_output_dtype = getattr(op_info, 'output_data_types', "").split(";") + op_output_dtype = [item.lower() for item in op_output_dtype] + input_dtype_diff = set(op_input_dtype).difference(set(valid_inputs)) + output_dtype_diff = set(op_output_dtype).difference(set(valid_outputs)) + unsupported_dtype_diff = input_dtype_diff.union(output_dtype_diff) + if not unsupported_dtype_diff: + return None + + return suggestion.format(",".join(unsupported_dtype_diff).upper(), + op_type, + ",".join(valid_inputs).upper()) + + def build(self): + for check in self.check_rules: + (check_func, check_rule), = check.items() + if check_func not in self.supported_checker: + logger.warning("Skip %s, which has not been implemented.", check_func) + continue + self.checker_list.append(partial(self.supported_checker.get(check_func), check_rule)) + + +class ExampleGuideChecker(BaserChecker): + def __init__(self, check_rules: List[Dict] = None): + super(ExampleGuideChecker, self).__init__() + self.check_rules = check_rules if check_rules is not None else [] + self.build() + + def build(self): + def _guide_url(check_item: Dict, op_info) -> Optional[str]: + supported_op_type = check_item.get('op_type', []) + url = check_item.get('url', "") + suggestion 
= check_item.get('suggestion', "") + + if getattr(op_info, 'op_type', "UNKNOWN").lower() in supported_op_type: + return suggestion if "{}" not in suggestion else suggestion.format(url) + + for check in self.check_rules: + (_, check_rule), = check.items() + self.checker_list.append(partial(_guide_url, check_rule)) + + +AICPU_CHECKER = { + "CommonChecker": CommonChecker, + "ExampleGuideChecker": ExampleGuideChecker +} diff --git a/profiler/advisor/analyzer/computation/bound/block_dim_checker.py b/profiler/advisor/analyzer/computation/bound/block_dim_checker.py new file mode 100644 index 0000000000..d1a1384b8d --- /dev/null +++ b/profiler/advisor/analyzer/computation/bound/block_dim_checker.py @@ -0,0 +1,77 @@ +import logging + +from typing import List + +from profiler.advisor.analyzer.computation.operator_checker import OperatorChecker +from profiler.advisor.common import constant +from profiler.advisor.config.config import Config +from profiler.advisor.dataset.profiling.profiling_dataset import ProfilingDataset + +logger = logging.getLogger() + + +class BlockDimChecker(OperatorChecker): + _SUGGESTION: List[str] = [] + _CHECKER = "block dim" + _PROBLEM = "block dim" + _description = "some operator does not make full use of {} ai core" + _ITEMS = [ + "op_name", "op_type", "task_type", "task_duration", "income", "block_dim", "mix_block_dim", "input_shapes", + "input_data_types", "input_formats", "output_shapes", "output_data_types", "output_formats" + ] + + def _check_data(self, data): + if not self._check_summary(data): + return False + if not Config().get_config("ai_core_num"): + logger.warning(self.SKIP_CHECK_MSG, self._CHECKER, "ai core num in info.json file") + return False + summary = data.op_summary + op_info = summary.op_list[0] + if not hasattr(op_info, "block_dim"): + logger.warning(self.SKIP_CHECK_MSG, self._CHECKER, "block dim in op summary") + return False + if Config().get_config("ai_core_num"): + self._aicore_num = int(Config().get_config("ai_core_num")) + if Config().get_config("aiv_num"): + self._aiv_num = int(Config().get_config("aiv_num")) + self._description = self._description.format(self._aicore_num) + if self._aiv_num: + self._description += f" or {self._aiv_num} ai vector core" + self._description += f";\n Top-{OperatorChecker._MAX_TUNE_OP_NUM} operator of " \ + "task duration are as follows:\n" + return True + + def make_render(self, html_render, record): + html_render.render_template(key="operator", + template_dir="templates", + template_name="operator_block_dim.html", + format_result=self.format_operator_result(record, constant.OPERATOR_OUT_TOPK)) + + def _check_operator(self, op_info) -> bool: + if op_info.task_type not in ["AI_CORE", "AI_VECTOR_CORE", "MIX_AIC"]: + return False + block_dim = int(op_info.block_dim) + core_num = self.get_core_num(op_info) + if block_dim % core_num == 0: + return False + if op_info.task_type == "MIX_AIC" and hasattr(op_info, "mix_block_dim") \ + and self._aiv_num and int(op_info.mix_block_dim) % self._aiv_num == 0: + return False + return True + + def get_core_num(self, op_info): + """ + get core num of task type + """ + if op_info.task_type == "AI_CORE" or not self._aiv_num: + core_num = self._aicore_num + else: + core_num = self._aiv_num + return core_num + + def format_suggestion_content(self, profiling_data: ProfilingDataset) -> None: + if profiling_data.PROF_TYPE == constant.ASCEND_PYTORCH_PROFILER: + self._SUGGESTION.append(self.PyTorch_OPERATOR_TUNE_SUGGESTION) + elif profiling_data.PROF_TYPE == constant.MSLITE: + 
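            # NOTE: illustrative aside, not part of this patch. _check_operator above flags an
            # operator when its block_dim does not divide evenly across the available cores.
            # For example, assuming ai_core_num = 32 (a made-up value for illustration):
            #     block_dim = 32 or 64 -> 32 % 32 == 0 / 64 % 32 == 0, cores fully used, not flagged
            #     block_dim = 17       -> 17 % 32 != 0, some cores stay idle, flagged
            # MIX_AIC tasks are additionally excused when mix_block_dim is a multiple of aiv_num.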
+            self._SUGGESTION.append(self.MSLite_OPERATOR_TUNE_SUGGESTION)
diff --git a/profiler/advisor/analyzer/computation/bound/operator_bound_checker.py b/profiler/advisor/analyzer/computation/bound/operator_bound_checker.py
new file mode 100644
index 0000000000..d919eb7d5f
--- /dev/null
+++ b/profiler/advisor/analyzer/computation/bound/operator_bound_checker.py
@@ -0,0 +1,56 @@
+import logging
+from typing import List
+
+from profiler.advisor.analyzer.computation.operator_checker import OperatorChecker
+from profiler.advisor.common import constant
+from profiler.advisor.config.config import Config
+from profiler.advisor.dataset.profiling.profiling_dataset import ProfilingDataset
+from profiler.advisor.utils.utils import to_percent
+
+logger = logging.getLogger()
+
+
+class OperatorBoundChecker(OperatorChecker):
+    _MIN_TASK_DURATION = 20  # min task duration 20us
+    _CHECKER = "operator no bound"
+    _PROBLEM = "operator no bound"
+    _SUGGESTION: List[str] = []
+    _description = (
+        f"No mte, cube, vector or scalar ratio is more than {to_percent(Config().operator_bound_ratio)};\n" +
+        f"Top task duration operators that need to be tuned are as follows: \n")
+    _ITEMS = [
+        "op_name", "op_type", "task_type", "task_duration", "vec_ratio", "mac_ratio", "scalar_ratio", "mte1_ratio",
+        "mte2_ratio", "mte3_ratio", "block_dim", "input_shapes", "input_data_types", "input_formats", "output_shapes",
+        "output_data_types", "output_formats"
+    ]
+
+    def _check_data(self, data):
+        if not self._check_summary(data):
+            return False
+        for op_info in data.op_summary.op_list:
+            if self._check_operator(op_info):
+                return True
+
+        logger.warning(self.SKIP_CHECK_MSG, self._CHECKER, "ratio in op summary")
+        return False
+
+    def _check_operator(self, op_info) -> bool:
+        bound_list = ["vec_ratio", "mac_ratio", "scalar_ratio", "mte1_ratio", "mte2_ratio", "mte3_ratio"]
+        ratio_list = [self.get_ratio(op_info, attr) for attr in bound_list]
+        if not any(ratio_list):
+            return False  # no data, skip check
+        if any(ratio and ratio > Config().operator_bound_ratio for ratio in ratio_list):
+            return False
+        return True
+
+    def make_render(self, html_render, record):
+        html_render.render_template(key="operator",
+                                    template_dir="templates",
+                                    template_name="operator_no_bound.html",
+                                    format_result=self.format_operator_result(record, constant.OPERATOR_OUT_TOPK))
+
+    def format_suggestion_content(self, profiling_data: ProfilingDataset) -> None:
+        if profiling_data.PROF_TYPE == constant.ASCEND_PYTORCH_PROFILER:
+            self._SUGGESTION.append(self.PyTorch_OPERATOR_TUNE_SUGGESTION)
+        elif profiling_data.PROF_TYPE == constant.MSLITE:
+            self._SUGGESTION.append(self.MSLite_OPERATOR_TUNE_SUGGESTION)
diff --git a/profiler/advisor/analyzer/computation/op_compile/dynamic_shape_checker.py b/profiler/advisor/analyzer/computation/op_compile/dynamic_shape_checker.py
new file mode 100644
index 0000000000..4d405eb918
--- /dev/null
+++ b/profiler/advisor/analyzer/computation/op_compile/dynamic_shape_checker.py
@@ -0,0 +1,82 @@
+import copy
+import logging
+from typing import List
+
+from profiler.advisor.analyzer.computation.operator_checker import OperatorChecker
+from profiler.advisor.common import constant
+from profiler.advisor.dataset.profiling.info_collection import OpInfo
+from profiler.advisor.result.item import OptimizeItem, StatisticsItem, OptimizeRecord
+
+logger = logging.getLogger()
+
+
+class DynamicShapeChecker(OperatorChecker):
+    ENABLE_COMPILED_SUGGESTION = "Optimize by enabling compiled operator, such as:\n" \
+                                 "`torch_npu.npu.set_compile_mode(jit_compile=False)`\n"
+    _SUGGESTION: List[str] = [ENABLE_COMPILED_SUGGESTION]
+    _CHECKER = "dynamic shape operator"
+    _PROBLEM = "Dynamic shape operator"
+    _description = f"Found all operators are dynamic shape"
+    _op_list: List[OpInfo] = []
+    _tune_op_list: List[str] = []  # record op name to be tuned, and save to tune_ops_file.cfg
+    _op_views: List = []
+
+    def __init__(self, cann_version) -> None:
+        super().__init__(cann_version=cann_version)
+
+    def check(self, profiling_database) -> bool:
+        # before CANN 8.0.0, read the op_state attribute from ge_info to decide whether shapes are dynamic
+        if hasattr(profiling_database, "ge_info") and profiling_database.ge_info:
+            ge_info = profiling_database.ge_info
+            static_shape_operators = ge_info.get_static_shape_operators()
+            if len(static_shape_operators) == 0:
+                OperatorChecker.IS_ALL_OPERATOR_DYNAMIC_SHAPE = True
+                return True
+        # since CANN 8.0.0, the op_state attribute comes from the op_summary file
+        elif hasattr(profiling_database, "op_summary"):
+            static_shape_operators = profiling_database.op_summary.get_static_shape_operators()
+            if len(static_shape_operators) == 0:
+                OperatorChecker.IS_ALL_OPERATOR_DYNAMIC_SHAPE = True
+                return True
+        else:
+            logger.warning("Skip dynamic shape checker because the host folder does not contain a ge_info.db file.\n"
+                           "To enable the dynamic shape checker, please try to set data_simplification=False in experimental_config.\n"
+                           "For more details, please refer to: %s", constant.ASCEND_PROFILER_URL)
+        return False
+
+    def make_record(self, profiling_database) -> OptimizeRecord:
+        """
+        make record for what and how to optimize
+        """
+
+        optimization_item = OptimizeItem(
+            self._PROBLEM,
+            self._description,
+            self._SUGGESTION
+        )
+        statistics_item = StatisticsItem("", "", 1)
+        return OptimizeRecord(optimization_item, statistics_item)
+
+    def format_operator_result(self, record, limit=-1):
+        """
+        Format operator result to html
+        :param record: profiling check record
+        :param limit: Limit number of operator statistics lists.
+        :return:
+        """
+        optimization_item = record.optimization_item
+        release_suggestion_list = []
+        for suggestion in optimization_item.suggestion:
+            release_suggestion = copy.deepcopy(suggestion)
+            if release_suggestion == DynamicShapeChecker.ENABLE_COMPILED_SUGGESTION:
+                release_suggestion += \
+                    f"for details please refer to link : LINK"
+            release_suggestion_list.append(release_suggestion.replace('\n', '
')) + format_result = {"record": record.__dict__, "suggestion": '
'.join(release_suggestion_list)} + return format_result + + def make_render(self, html_render, record): + html_render.render_template(key="operator", + template_dir="templates", + template_name="operator_dynamic_shape.html", + format_result=self.format_operator_result(record)) diff --git a/profiler/advisor/analyzer/computation/operator_checker.py b/profiler/advisor/analyzer/computation/operator_checker.py new file mode 100644 index 0000000000..e8490ff206 --- /dev/null +++ b/profiler/advisor/analyzer/computation/operator_checker.py @@ -0,0 +1,282 @@ +import copy +import logging +from textwrap import fill +from typing import List + +from profiler.advisor.common import constant +from profiler.advisor.common.version_control import VersionControl +from profiler.advisor.config.config import Config +from profiler.advisor.dataset.profiling.info_collection import OpInfo +from profiler.advisor.dataset.profiling.profiling_dataset import ProfilingDataset +from profiler.advisor.result.item import OptimizeItem, StatisticsItem, OptimizeRecord +from profiler.advisor.utils.utils import safe_division + +logger = logging.getLogger() + + +class OperatorChecker(VersionControl): + _SUPPORT_VERSIONS = [constant.CANN_VERSION_C30, constant.CANN_VERSION_C13, constant.CANN_VERSION_C15, constant.CANN_VERSION_C17] + IS_ALL_OPERATOR_DYNAMIC_SHAPE = False + _MAX_TUNE_OP_NUM = constant.OPERATOR_OUT_TOPK + _MIN_TASK_DURATION = 0 + _MIN_TASK_DURATION_RATIO = 1.0 + _MIN_TOTAL_DURATION_RATIO = 1.0 + _CHECKER = str() + _PROBLEM = str() + _description = str() + STACK_INFO_ITEMS = "" + _ITEMS: List[str] = [] + _SUGGESTION: List[str] = [] + SKIP_CHECK_MSG = "Skip %s checker because of not containing %s" + _tune_op_info_list: List[OpInfo] = [] + PyTorch_OPERATOR_TUNE_SUGGESTION = f"Optimize operator by AOE, such as:\n" \ + f"'aoe --job_type=2 --model_path=$user_dump_path " \ + f"--tune_ops_file={Config().tune_ops_file}'\n" + MSLite_OPERATOR_TUNE_SUGGESTION = f"Optimize operator by AOE in mindspore lite framework, such as:\n" \ + f"converter_lite --fmk=ONNX --optimize=ascend_oriented --saveType=MINDIR " \ + f"--modelFile=$user_model.onnx --outputFile=user_model --configFile=./config.txt\n" + _tune_op_list: List[str] = [] + + def __init__(self, cann_version: str): + self.cann_verson = cann_version + self._op_list: List[OpInfo] = [] + + def check(self, profiling_data: ProfilingDataset) -> bool: + """ + check if any operator need optimize + :param profiling_data: profiling datasest + :return: true or false + """ + if not self._check_data(profiling_data): + return False + + summary = profiling_data.op_summary + total_task_duration = 0.0 + max_task_duration = 0.0 + for op_info in summary.op_list: + if not self._check_operator(op_info): + continue + task_duration = float(op_info.task_duration) + total_task_duration += task_duration + max_task_duration = max(max_task_duration, task_duration) + self._op_list.append(op_info) + if task_duration > self._MIN_TASK_DURATION: + self._tune_op_info_list.append(op_info) + + if any([ + max_task_duration > self._MIN_TASK_DURATION, + round(safe_division(max_task_duration, summary.get_total_task_duration()), + 4) > self._MIN_TASK_DURATION_RATIO, + round(safe_division(total_task_duration, summary.get_total_task_duration()), 4) > + self._MIN_TOTAL_DURATION_RATIO, + ]): + self._op_list.sort(key=lambda x: float(x.get_attr("task_duration")), reverse=True) + self._tune_op_info_list.sort(key=lambda x: float(x.get_attr("task_duration")), reverse=True) + for op in self._op_list: + if op.op_name not in 
self._tune_op_list and len(self._tune_op_list) < constant.OPERATOR_OUT_TOPK: + self._tune_op_list.append(op.op_name) + return True + return False + + def make_record(self, profiling_data: ProfilingDataset): + """ + Make record for what and how to optimize + :param profiling_data: profiling data + :return: optimize record + """ + task_duration_list = [float(op_info.get_attr("task_duration")) for op_info in self._op_list if + hasattr(op_info, "get_attr")] + total_cost_time = sum(task_duration_list) + total_task_duration = profiling_data.op_summary.get_total_task_duration() + count = len(task_duration_list) + statistics_item = StatisticsItem(total_task_duration, total_cost_time, count, self.get_incomes()) + optimization_item = OptimizeItem( + self._PROBLEM, + self._get_description(self._description, self.get_op_type_list(self._op_list)[:self._MAX_TUNE_OP_NUM]), + self._SUGGESTION + ) + return OptimizeRecord(optimization_item, statistics_item) + + def _get_description(self, description, op_type_list=None): + if not op_type_list: + return description + + desc_suffix = [] + for i in range(len(op_type_list)): + if i % 3 == 0 and i != 0: + desc_suffix.append("\n") + + desc_suffix.append(f"{op_type_list[i]}") + + if i < len(op_type_list) - 1: + desc_suffix.append(", ") + + description += "".join(desc_suffix) + return description + + def pre_check(self, profiling_data) -> bool: + self.format_suggestion_content(profiling_data) + return not (OperatorChecker.IS_ALL_OPERATOR_DYNAMIC_SHAPE and ( + OperatorChecker.PyTorch_OPERATOR_TUNE_SUGGESTION or OperatorChecker.MSLite_OPERATOR_TUNE_SUGGESTION + ) in self._SUGGESTION) + + def format_operator_result(self, record, limit): + """ + Format operator result to html + :param record: profiling check record + :param limit: Limit number of operator statistics lists. + :return: + """ + optimization_item = record.optimization_item + release_suggestion_list = [] + for suggestion in optimization_item.suggestion: + release_suggestion = copy.deepcopy(suggestion) + if release_suggestion == OperatorChecker.PyTorch_OPERATOR_TUNE_SUGGESTION: + release_suggestion += \ + (f"for details please refer to link : LINK") + elif release_suggestion == OperatorChecker.MSLite_OPERATOR_TUNE_SUGGESTION: + release_suggestion += \ + (f"\nThe config file for MSLite AOE usage is as follows:\n" \ + f"[ascend_context]\n" \ + f"aoe_mode=\"operator tuning\"\n" \ + f"--tune_ops_file={Config().tune_ops_file}\n" + f"\nFor details please refer to link : LINK") + release_suggestion_list.append(release_suggestion.replace('\n', '
')) + format_result = {"record": record.__dict__, + "suggestion": fill('
'.join(release_suggestion_list), width=200), + "task_duration": round(record.statistics_item.task_duration, 2)} + statistic = self.group_by(copy.deepcopy(self._op_list), limit=limit) + format_result["statistic"] = statistic + return format_result + + def group_by(self, op_list, op_key="op_type", + limit: int = constant.OPERATOR_LIST_UNLIMIT): + """ + group by Profiling.OpInfo's attribute key, then return top limit tuple by duration + :param op_list: input a OpInfo list + :param op_key: group by Profiling.OpInfo's attribute key + :param limit: top limit num, if you do not need to limit the length of tuple, input -1(int) + :return: + """ + if op_list is None: + op_list = [] + statistic = {} # str, json + for op_info in op_list: + if statistic.get(op_info.get_attr(op_key)): + statistic[op_info.get_attr(op_key)]["summary"]["total_duration"] = float( + statistic[op_info.get_attr(op_key)]["summary"]["total_duration"]) + float( + op_info.get_attr("task_duration", constant.DEFAULT_DURATION_ZERO)) + statistic[op_info.get_attr(op_key)]["summary"]["counts"] += 1 + stack_info = op_info.get_attr("stack_info") + if stack_info: + op_info.stack_info = stack_info.replace('\r\n', '
') + statistic[op_info.get_attr(op_key)]["op_info_list"].append(op_info) + else: + statistic[op_info.get_attr(op_key)] = {"summary": {}, "op_info_list": []} + statistic[op_info.get_attr(op_key)]["summary"]["op_type"] = op_info.get_attr( + "op_type", constant.DEFAULT_OPERATOR_TYPE) + statistic[op_info.get_attr(op_key)]["summary"]["total_duration"] = float( + op_info.get_attr("task_duration", constant.DEFAULT_DURATION_ZERO)) + statistic[op_info.get_attr(op_key)]["summary"]["counts"] = 1 + stack_info = op_info.get_attr("stack_info") + if stack_info: + op_info.stack_info = stack_info.replace('\r\n', '
') + statistic[op_info.get_attr(op_key)]["op_info_list"] = [op_info] + + if statistic: + for op_key in statistic.keys(): + statistic[op_key]["summary"]["total_duration"] = round( + statistic[op_key]["summary"]["total_duration"], 2) + # Grouped by op_type, sorted by total_duration, and obtained the top 10 operators that take the most time. + if limit > 0: + statistic = sorted( + statistic.items(), key=lambda kv: kv[1]["summary"]["total_duration"], reverse=True)[:limit] + else: + statistic = sorted(statistic.items(), key=lambda kv: kv[1]["summary"]["total_duration"], reverse=True) + else: + logger.warning("%s checker do not has results to format html", str(self.__class__.__name__)) + return statistic + + def _check_data(self, profiling_data): + return True + + def _check_operator(self, op_info): + return False + + def _get_income(self, _op_info: OpInfo) -> float: + return 0 + + def get_tune_op_list(self): + """ + get tune op list + :return: tune op list + """ + return self._tune_op_list + + def get_views(self, _graph_data): + """Get node views.""" + return [] + + @classmethod + def get_name(cls): + """ + get name of checker + :return: checker name + """ + return cls._PROBLEM + + def get_incomes(self) -> float: + """get incomes""" + incomes = 0.0 + for op_info in self._op_list: + income = self._get_income(op_info) + setattr(op_info, "income", round(income, 2)) + incomes += income + return incomes + + def get_op_type_list(self, op_list: List[OpInfo]): + """get op type list""" + op_type_list = [] + for op_info in op_list: + if op_info.op_type not in op_type_list: + op_type_list.append(op_info.op_type) + return op_type_list + + def _check_summary(self, data: ProfilingDataset): + if not data.op_summary: + logger.warning(self.SKIP_CHECK_MSG, self._CHECKER, "op summary") + return False + return True + + @staticmethod + def get_ratio(op_info: OpInfo, attr: str) -> float: + if not op_info.has_attr(attr): + return 0 + value = op_info.get_attr(attr) + if not value or value == "N/A": + return 0 + return float(value) + + def get_details(self) -> list: + """ + get details of operator to be optimized + :return: detail list + """ + op_list = self._op_list + if not op_list or not (self._ITEMS + [self.STACK_INFO_ITEMS]): + return [] + details = [] + attrs = [attr for attr in (self._ITEMS + [self.STACK_INFO_ITEMS]) if op_list[0].has_attr(attr)] + details.append(attrs) + op_list = sorted(op_list, key=lambda x: float(x.get_attr("task_duration")), reverse=True) + for op_info in op_list: + content = [ + op_info.get_attr(attr) if attr != "aicore_time" + else op_info.get_float_attr(attr, strict_mode=True) + + op_info.get_float_attr("aiv_time", strict_mode=True) for attr in attrs + ] + details.append(content) + return details + + def format_suggestion_content(self, profiling_data: ProfilingDataset) -> None: + return diff --git a/profiler/advisor/analyzer/computation/profiling_analyzer.py b/profiler/advisor/analyzer/computation/profiling_analyzer.py new file mode 100644 index 0000000000..98d3c5c49b --- /dev/null +++ b/profiler/advisor/analyzer/computation/profiling_analyzer.py @@ -0,0 +1,71 @@ +import logging +from abc import ABC +from typing import Dict, List + +from profiler.advisor.analyzer.base_analyzer import BaseAnalyzer +from profiler.advisor.common import constant +from profiler.advisor.result.result import OptimizeResult +from profiler.advisor.analyzer.computation.aicpu.aicpu_checker import AicpuChecker +from profiler.advisor.analyzer.computation.bound.block_dim_checker import BlockDimChecker +from 
profiler.advisor.analyzer.computation.bound.operator_bound_checker import OperatorBoundChecker +from profiler.advisor.analyzer.computation.operator_checker import OperatorChecker +from profiler.advisor.analyzer.computation.op_compile.dynamic_shape_checker import DynamicShapeChecker +from profiler.advisor.analyzer.computation.operator_checker import OperatorChecker +from profiler.advisor.display.html.render import HTMLRender +from profiler.advisor.dataset.profiling.profiling_dataset import ProfilingDataset +from profiler.advisor.utils.utils import get_supported_subclass + +logger = logging.getLogger() + + +class ProfilingAnalyzer(BaseAnalyzer, ABC): + dataset_cls_list = [ProfilingDataset] + + def __init__(self, collection_path, **kwargs) -> None: + cann_version = kwargs.get("cann_version", constant.DEFAULT_CANN_VERSION) + torch_version = kwargs.get("torch_version", constant.DEFAULT_TORCH_VERSION) + super().__init__(collection_path, cann_version=cann_version, torch_version=torch_version, **kwargs) + self.checker_list = [checker(cann_version) for checker in get_supported_subclass(OperatorChecker, cann_version)] + # 动态 shape checker 放到首位,因为动态 shape 情形下AOE算子调优现在不支持,AOE 算子调优 checker 可以跳过 + index = next((i for i, item in enumerate(self.checker_list) if isinstance(item, DynamicShapeChecker)), None) + self.checker_list.insert(0, self.checker_list.pop(index)) + self.html_render = HTMLRender() + self.result = OptimizeResult() + + @BaseAnalyzer.check_data((ProfilingDataset.get_key(),)) + def optimize(self) -> OptimizeResult: + """ + optimize operator + :param data: input datasets + :return: result + """ + profiling_data = self.get_first_data_by_key(self.dataset_list, ProfilingDataset.get_key()) + for checker in self.checker_list: + if not checker.pre_check(profiling_data): + continue + if checker.check(profiling_data): + # add record + record = checker.make_record(profiling_data) + checker.make_render(self.html_render, record) + self.result.add(record) + # add details + details = checker.get_details() + if details: + for i, detail in enumerate(details): + if i == 0: + # the first row is header + self.result.add_detail(checker.get_name(), headers=detail) + else: + self.result.add_detail(checker.get_name(), detail=detail) + # add tune op list + tune_op_list = checker.get_tune_op_list() + if tune_op_list: + self.result.add_tune_op_list(tune_op_list) + + return self.result + + def make_record(self): + pass + + def make_render(self): + pass diff --git a/profiler/advisor/analyzer/schedule/fusion_ops/timeline_api_stack_checker.py b/profiler/advisor/analyzer/schedule/fusion_ops/timeline_api_stack_checker.py new file mode 100644 index 0000000000..f684a48921 --- /dev/null +++ b/profiler/advisor/analyzer/schedule/fusion_ops/timeline_api_stack_checker.py @@ -0,0 +1,163 @@ +import logging +from typing import List + +from profiler.advisor.common import constant as const +from profiler.advisor.common.timeline.event import TimelineEvent +from profiler.advisor.dataset.timeline_event_dataset import TimelineEventDataset +from profiler.advisor.result.result import OptimizeResult +from profiler.advisor.result.item import OptimizeItem, OptimizeRecord +from profiler.advisor.utils.utils import get_analyze_processes, ParallelJob + +logger = logging.getLogger() + + +class OpStackFinder: + + def __init__(self): + self.n_processes = get_analyze_processes() + self._stack_record = [] + self._task_id_record = {} + self.op_name = None + self.task_type = None + self.matched_index = set() + + def get_api_stack_by_op(self, 
event_dataset: TimelineEventDataset, op_name: List[str] = None, task_type: str = None, + disable_multiprocess=False): + """ + :Param event_dataset: dataset of timeline event + :Param op_name: operator name, e.g. IndexPutV2 + :Param task_type: operator task type, optionals are AI_CPU and AI_CORE + :Param disable_multiprocess: disable multiprocessing, avoid cost time of enable new process for light task + """ + if not op_name: + op_name = [] + if not isinstance(op_name, list): + op_name = [op_name] + + self.op_name = ",".join(op_name) + self.task_type = task_type + op_name_list = event_dataset.task_op_names if not op_name else op_name + + if self.n_processes <= 1 or disable_multiprocess: + self._query_stacks_multiprocess(event_dataset, op_name_list, task_type) + else: + event_num_per_process = int(len(op_name_list) / self.n_processes) + 1 + parallel_analyzer = ParallelJob( + self._query_stacks_multiprocess, + [[event_dataset, op_name_list[i:i + event_num_per_process], task_type] + for i in range(0, len(op_name_list), event_num_per_process)], + job_name="Analyzing operator stacks from timeline" + ) + parallel_analyzer.start(self.n_processes) + self.query_stack(event_dataset) + + def make_record(self, result: OptimizeResult): + """ + make record for what and how to optimize + """ + if not self._stack_record: + return + + desc = f"Found {len(self._stack_record)} called stacks for" + if self.op_name and self.task_type: + desc += f" operators with name '{self.op_name}' with task type '{self.task_type}'" + elif self.op_name and not self.task_type: + desc += f" operators with name '{self.op_name}'" + elif self.task_type and not self.op_name: + desc += f" operators with task type '{self.task_type}'" + else: + desc += " all operators" + + suggestion = f"Please use command 'ma-advisor analyze profiling' to analyze operators" + optimization_item = OptimizeItem( + "Operator stacks", + desc, + [suggestion] + ) + result.add(OptimizeRecord(optimization_item)) + + record_title = ["Task ID", "op name", "op type", "code stacks"] + result.add_detail('operator stacks', headers=record_title) + + for op_info in self._stack_record: + result.add_detail('operator stacks', detail=op_info) + + def _get_api_stack_by_op(self, event_dataset: TimelineEventDataset, op_name: str, task_type: str): + for _, src_op_event in event_dataset.ops_with_task_type.items(): + + op_task_type = src_op_event.get(const.TASK_TYPE) + if not (src_op_event.name == op_name and op_task_type and op_task_type == task_type): + continue + + torch_to_npu_key = f"s-{src_op_event.tid}-{src_op_event.ts}" + torch_to_npu_event = event_dataset.torch_to_npu.get(torch_to_npu_key) or event_dataset.torch_to_npu.get( + f"s-{src_op_event.ts}") or event_dataset.torch_to_npu.get(f"s-{src_op_event.ts.replace('.', '')}") + + acl_to_npu_event = src_op_event.ts in event_dataset.acl_to_npu + + if not torch_to_npu_event and not acl_to_npu_event: + continue + + # query stack by torch_to_npu first, due to each operator had acl_to_npu incoming flow in cann6.3 + if torch_to_npu_event: + dst_op_index = self._query_index_by_torch_to_npu(event_dataset, torch_to_npu_event) + else: + dst_op_index = self._query_index_by_acl_to_npu(acl_to_npu_event) + + if not dst_op_index: + continue + + task_id = src_op_event.task_id + if not task_id: + continue + self.matched_index.add(dst_op_index) + if dst_op_index not in self._task_id_record: + self._task_id_record[dst_op_index] = [] + self._task_id_record[dst_op_index].append([task_id, op_name, task_type]) + + def 
_query_index_by_torch_to_npu(self, event_dataset, torch_to_npu_event): + dst_op_event_key = torch_to_npu_event.ts + dst_op_event = event_dataset.ops_with_stack.get(dst_op_event_key) + + if not dst_op_event: + return const.TIMELINE_BACKWARD_NO_STACK_CODE + + return dst_op_event.get("dataset_index") + + def _query_index_by_acl_to_npu(self, acl_to_npu_event): + if acl_to_npu_event: + return const.TIMELINE_ACL_TO_NPU_NO_STACK_CODE + + def _query_stacks_multiprocess(self, event_dataset, op_name_list, task_type): + + for op_name in op_name_list: + if task_type is not None: + self._get_api_stack_by_op(event_dataset, op_name, task_type) + else: + self._get_api_stack_by_op(event_dataset, op_name, const.AI_CORE) + self._get_api_stack_by_op(event_dataset, op_name, const.AI_CPU) + + def _format_stack_record(self): + stack_list = [] + for task_id, stack_info in self._task_id_record.items(): + stack_list.append([task_id, *stack_info]) + return stack_list + + def _query_stack_by_matched_index(self, index, event): + if index not in self.matched_index: + return None + event = TimelineEvent(event) + stack = event.args.get(const.CALL_STACKS) + stack = stack if stack else const.NO_STACK_REASON_MAP.get(const.TIMELINE_BACKWARD_NO_STACK_CODE) + for matched_op_info in self._task_id_record.get(index, []): + self._stack_record.append([*matched_op_info, stack]) + + for matched_op_info in self._task_id_record.get(const.TIMELINE_ACL_TO_NPU_NO_STACK_CODE, []): + self._stack_record.append([*matched_op_info, + const.NO_STACK_REASON_MAP.get(const.TIMELINE_ACL_TO_NPU_NO_STACK_CODE)]) + return None + + def query_stack(self, event_dataset: TimelineEventDataset): + if not event_dataset.dataset_len: + return + _ = event_dataset.parse_data_with_generator(self._query_stack_by_matched_index) diff --git a/profiler/advisor/common/analyzer_scopes.py b/profiler/advisor/common/analyzer_scopes.py index 9a83adeb6d..03d7759a72 100644 --- a/profiler/advisor/common/analyzer_scopes.py +++ b/profiler/advisor/common/analyzer_scopes.py @@ -5,3 +5,4 @@ class SupportedScopes: TIMELINE_FUSION_OPS = "timeline_fusion_ops" SLOW_RANK = "slow_rank" SLOW_LINK = "slow_link" + PORFILING_OPERATOR_ANALYSIS = "profiling_operator_analysis" diff --git a/profiler/advisor/common/profiling/__init__.py b/profiler/advisor/common/profiling/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/profiler/advisor/common/profiling/ge_info.py b/profiler/advisor/common/profiling/ge_info.py new file mode 100644 index 0000000000..9996ec611a --- /dev/null +++ b/profiler/advisor/common/profiling/ge_info.py @@ -0,0 +1,47 @@ +""" +DB +""" +import logging +import os +from typing import Any, List + +from sqlalchemy import text + +from profiler.advisor.dataset.profiling.db_manager import ConnectionManager +from profiler.advisor.dataset.profiling.profiling_parser import ProfilingParser + +logger = logging.getLogger() + + +class GeInfo(ProfilingParser): + """ + ge info file + """ + FILE_PATTERN = r"ge_info.db" + FILE_PATTERN_MSG = "ge_info.db" + FILE_INFO = "ge info" + STATIC_OP_STATE = "0" + DYNAMIC_OP_STATE = "1" + + def __init__(self, path: str) -> None: + super().__init__(path) + self.op_state_info_list = None + + def parse_from_file(self, profiling_db_file): + """ + ge info + """ + db_path, db_file = os.path.split(profiling_db_file) + if not ConnectionManager.check_db_exists(db_path, [db_file]): + return False + conn = ConnectionManager(db_path, db_file) + if conn.check_table_exists(['TaskInfo']): + with conn().connect() as sql_conn: + 
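                # NOTE: illustrative aside, not part of this patch. The TaskInfo table is assumed to
                # hold one row per operator with an op_state column, where "0" (STATIC_OP_STATE) marks
                # a static-shape operator and "1" (DYNAMIC_OP_STATE) a dynamic-shape one; the
                # DynamicShapeChecker above treats an empty get_static_shape_operators() result as
                # "all operators are dynamic shape". A rough manual check of the same data might be:
                #     sqlite3 ge_info.db "select op_name, op_state from TaskInfo limit 10"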
self.op_state_info_list = sql_conn.execute(text("select op_name, op_state from TaskInfo")).fetchall() + return True + + def get_static_shape_operators(self) -> List[Any]: + return [op for op, state in self.op_state_info_list if state == self.STATIC_OP_STATE] + + def get_dynamic_shape_operators(self) -> List[Any]: + return [op for op, state in self.op_state_info_list if state == self.DYNAMIC_OP_STATE] diff --git a/profiler/advisor/common/profiling/msprof.py b/profiler/advisor/common/profiling/msprof.py new file mode 100644 index 0000000000..9453986b82 --- /dev/null +++ b/profiler/advisor/common/profiling/msprof.py @@ -0,0 +1,144 @@ +""" +msprof +""" +import logging +from typing import Dict, List + +from profiler.advisor.dataset.profiling.info_collection import TaskInfo +from profiler.advisor.dataset.profiling.profiling_parser import ProfilingParser + +logger = logging.getLogger() + + +class TaskChecker: + """ + check task info + """ + + def __init__(self): + self.sqe_keys = set() + + def is_sqe(self, task: TaskInfo) -> bool: + """check sqe""" + key = (task.pid, task.tid) + if task.args.get('name', '').endswith('_SQE'): + self.sqe_keys.add(key) + return False + + return key in self.sqe_keys + + +class Msprof(ProfilingParser): + """ + msprof + + """ + FILE_PATTERN = r"^msprof[_\d]+.json$" + FILE_PATTERN_MSG = "msprof_*.json" + FILE_INFO = "msprof" + + def __init__(self, path: str) -> None: + super().__init__(path) + self._tasks: List[TaskInfo] = [] + self._iteration_time = 0.0 + self._model_id = None + self._iteration_id = None + self._process_pid: Dict[str, str] = {} + self._min_time = 0.0 + self._max_time = 0.0 + self._data_process_time = 0.0 + self._start_point = 0.0 + + def parse_from_file(self, file: str): + if not self._parse_json(file): + return False + min_time = float('inf') + max_time = 0.0 + task_checker = TaskChecker() + is_iter = False + for item in self._raw_data: + task = TaskInfo(item) + if task.cat == "Iteration Time": + self._min_time = task.start_time + self._max_time = task.end_time + self._iteration_time = task.dur + is_iter = True + if task.cat == "Data_aug Bound" and "Data_aug Bound(us)" in task.args: + self._data_process_time = task.args["Data_aug Bound(us)"] + + if self._start_point == 0 and task.start_time > 0: + self._start_point = task.start_time + + if task_checker.is_sqe(task): + continue + + self._tasks.append(task) + self._parse_task(task) + + start_time = task.start_time + dur = task.dur + if start_time == -1 or dur == -1 or dur == 0: + continue + if start_time < min_time: + min_time = start_time + end_time = start_time + dur + if end_time > max_time: + max_time = end_time + if not is_iter: + self._iteration_time = dur + self._max_time = max_time + self._min_time = min_time + if self._tasks: + return True + return False + + def _parse_task(self, task): + if "Iteration Refresh" in task.name: + self._iteration_id = task.args.get("Iteration ID") + elif "Model ID" in task.name: + self._model_id = int(task.name.split(":")[1]) + elif "process_name" == task.name: + self._process_pid[task.args.get("name")] = task.pid + + @property + def step_time(self): + return self._iteration_time + self._data_process_time + + @property + def iteration_time(self): + return self._iteration_time + + @property + def iter_max_time(self): + return self._max_time + + @property + def iter_min_time(self): + return self._min_time + + @property + def data_process_time(self): + return self._data_process_time + + @property + def tasks(self): + return self._tasks + + @property + def 
model_id(self): + return self._model_id + + @property + def iteration_id(self): + return self._iteration_id + + @property + def process_pid(self): + return self._process_pid + + def __len__(self): + return len(self._tasks) + + @property + def start_point(self): + return self._start_point diff --git a/profiler/advisor/common/profiling/op_summary.py b/profiler/advisor/common/profiling/op_summary.py new file mode 100644 index 0000000000..d79439dbad --- /dev/null +++ b/profiler/advisor/common/profiling/op_summary.py @@ -0,0 +1,76 @@ +""" +summary +""" +import logging +from decimal import Decimal +from typing import List, Any + +from profiler.advisor.dataset.profiling.info_collection import OpInfo +from profiler.advisor.dataset.profiling.profiling_parser import ProfilingParser +from profiler.advisor.utils.utils import format_excel_title, lazy_property + +logger = logging.getLogger() + + +class OpSummary(ProfilingParser): + """ + op summary + """ + + FILE_PATTERN = r"^op_summary_[_\d]+\.csv$" + FILE_PATTERN_MSG = "op_summary_*.csv" + FILE_INFO = "op summary" + STATIC_OP_STATE = "static" + DYNAMIC_OP_STATE = "dynamic" + + def __init__(self, path: str) -> None: + super().__init__(path) + self.op_list: List[OpInfo] = [] + self._total_task_duration = 0.0 + self._total_task_wait_time = 0.0 + self._raw_data: List[List[str]] = [] + + def parse_from_file(self, file: str): + if not self._parse_csv(file): + return False + title_dict = dict(enumerate(self._raw_data[0])) + for op_data in self._raw_data[1:]: + op_info = OpInfo() + for idx, value in enumerate(op_data): + title = title_dict.get(idx, "") + formatted_title = format_excel_title(title) + if formatted_title == 'task_start_time' and 'us' in title and \ + value.replace('.', '').replace("E+", "").isnumeric(): + value = str(Decimal(value) * Decimal(1000)) + op_info.add_attr(formatted_title, value) + self.op_list.append(op_info) + self._total_task_duration += self.get_float(op_info.get_attr("task_duration")) + self._total_task_wait_time += self.get_float(op_info.get_attr("task_wait_time")) + if not self.op_list: + logger.error("No valid op info in %s", file) + return False + return True + + def get_static_shape_operators(self) -> List[Any]: + return [op_info.get_attr("op_name") for op_info in self.op_list if op_info.get_attr("op_state") == self.STATIC_OP_STATE] + + def get_total_task_duration(self): + """ + get total task duration of all operators + :return: + """ + return self._total_task_duration + + @lazy_property + def task_dict(self): + """ + task dict + """ + task_dict = {} + for op_info in self.op_list: + if op_info.op_name not in task_dict: + task_dict[op_info.op_name] = [op_info] + else: + task_dict[op_info.op_name].append(op_info) + + return task_dict diff --git a/profiler/advisor/common/profiling/tasktime.py b/profiler/advisor/common/profiling/tasktime.py new file mode 100644 index 0000000000..3ce09a7838 --- /dev/null +++ b/profiler/advisor/common/profiling/tasktime.py @@ -0,0 +1,75 @@ +""" +task time +""" +import logging +from typing import Dict, List + +from profiler.advisor.dataset.profiling.info_collection import TaskInfo +from profiler.advisor.dataset.profiling.profiling_parser import ProfilingParser + +logger = logging.getLogger() + +AICPU_TASK_TYPE = "AI_CPU" +AICORE_TASK_TYPE = "AI_CORE" + + +class TaskTime(ProfilingParser): + """ + task time info + """ + + FILE_PATTERN = r"^task_time_[_\d]+\.json$" + FILE_PATTERN_MSG = "task_time*.json" + FILE_INFO = "task time" + + def __init__(self, path: str) -> None: + super().__init__(path) + 
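        # NOTE: illustrative aside, not part of this patch. parse_from_file below first scans the
        # metadata events ("ph" == "M", name "process_name") to map process names to pids, then puts
        # every task whose pid matches the "Task Scheduler" process into the AI_CORE or AI_CPU bucket
        # according to its "Task Type" argument, e.g. a trace event shaped roughly like:
        #     {"ph": "X", "pid": "<scheduler pid>", "ts": 123.0, "dur": 4.5,
        #      "args": {"Task Type": "AI_CPU", "Task Id": "7"}}
        # Field names beyond those read by TaskInfo are illustrative assumptions.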
self._tasks: List[TaskInfo] = [] + self._aicore_tasks: List[TaskInfo] = [] + self._aicpu_tasks: List[TaskInfo] = [] + self._process_map: Dict[str, str] = {} + self._pid_map: Dict[str, str] = {} + + def get_aicpu_tasks(self): + """ + get aicpu tasks + :return: aicpu tasks + """ + return self._aicpu_tasks + + def get_aicore_tasks(self): + """ + get aicore tasks + :return: aicore tasks + """ + return self._aicore_tasks + + def parse_from_file(self, file: str): + if not self._parse_json(file): + return False + for item in self._raw_data: + if item.get("ph") != "M": # header + continue + if item.get("name") != "process_name": + continue + pid = item.get("pid") + pname = item["args"]["name"] + self._process_map[pid] = pname + self._pid_map[pname] = pid + for item in self._raw_data: + if item.get("ph") == "M": # header + continue + task = TaskInfo(item) + self._tasks.append(task) + if task.pid != self._pid_map.get("Task Scheduler"): + continue + if task.task_type == AICORE_TASK_TYPE: + self._aicore_tasks.append(task) + elif task.task_type == AICPU_TASK_TYPE: + self._aicpu_tasks.append(task) + self._aicore_tasks.sort(key=lambda x: x.start_time) + self._aicpu_tasks.sort(key=lambda x: x.start_time) + if not self._tasks: + logger.error("No valid task info in %s", file) + return False + return True diff --git a/profiler/advisor/config/profiling_data_version_config.yaml b/profiler/advisor/config/profiling_data_version_config.yaml new file mode 100644 index 0000000000..45f4b5c0f7 --- /dev/null +++ b/profiler/advisor/config/profiling_data_version_config.yaml @@ -0,0 +1,80 @@ +versions: + - version: 8.0.0 + dirs_pattern: + ^PROF_\d{6}_\d{17}_\w+$: + mindstudio_profiler_output: + [ op_summary, msprof ] + class_attr: + op_summary: OpSummary + msprof: Msprof + file_attr: + op_summary: ^op_summary_\d{14}\.csv$ + msprof: ^msprof_\d{14}\.json$ + + - version: 7.0.0 + dirs_pattern: + ^PROF_\d{6}_\d{17}_\w+$: + ^device_\d+$: + summary: + [ op_summary ] + timeline: + [ msprof, task_time ] + host: + sqlite: + [ ge_info ] + class_attr: + op_summary: OpSummary + task_time: TaskTime + msprof: Msprof + ge_info: GeInfo + file_attr: + op_summary: ^op_summary_\d+_\d+_\d{14}\.csv$ + task_time: ^task_time_\d+_\d+_\d{14}\.json$ + msprof: ^msprof_\d+_\d+_\d{14}\.json$ + ge_info: ge_info.db + + - version: 7.0.RC1 + dirs_pattern: + ^PROF_\d{6}_\d{17}_\w+$: + ^device_\d+$: + summary: + [ op_summary ] + timeline: + [ msprof, task_time ] + host: + sqlite: + [ ge_info ] + class_attr: + op_summary: OpSummary + task_time: TaskTime + msprof: Msprof + ge_info: GeInfo + file_attr: + op_summary: ^op_summary_\d+_\d+_\d+_\d{14}\.csv$ + task_time: ^task_time_\d+_\d+_\d+_\d{14}\.json$ + msprof: ^msprof_\d+_\d+_\d+_\d{14}\.json$ + ge_info: ge_info.db + + - version: 6.3.RC2 + dirs_pattern: + ^PROF_\d{6}_\d{17}_\w+$: + ^device_\d+$: + summary: + [ op_summary ] + timeline: + [ msprof, task_time ] + host: + sqlite: + [ ge_info ] + class_attr: + op_summary: OpSummary + task_time: TaskTime + msprof: Msprof + ge_info: GeInfo + file_attr: + op_summary: ^op_summary_\d+_\d+_\.csv$ + task_time: ^task_time_\d+_\d+_\.json$ + msprof: ^msprof_\d+_\d+_\.json$ + ge_info: ge_info.db + + diff --git a/profiler/advisor/dataset/cluster/cluster_dataset.py b/profiler/advisor/dataset/cluster/cluster_dataset.py index 4db50464ef..b8daedab08 100644 --- a/profiler/advisor/dataset/cluster/cluster_dataset.py +++ b/profiler/advisor/dataset/cluster/cluster_dataset.py @@ -18,8 +18,6 @@ class ClusterDataset(Dataset): def __init__(self, collection_path, data: dict, **kwargs) -> 
None: super().__init__(collection_path, data) - if not self.is_cluster_analysis_output_exist(): - self.cluster_analyze() def is_cluster_analysis_output_exist(self): """ @@ -34,6 +32,8 @@ class ClusterDataset(Dataset): return False def cluster_analyze(self): + if self.is_cluster_analysis_output_exist(): + return parameter = { Constant.COLLECTION_PATH: self.collection_path, Constant.ANALYSIS_MODE: "all" @@ -70,6 +70,7 @@ class ClusterStepTraceTimeDataSet(ClusterDataset): super().__init__(collection_path, data) def _parse(self): + self.cluster_analyze() step_data = self.load_csv_data(const.CLUSTER_STEP_TIME_CSV, ClusterStepTraceTimeBean) self._step_dict = self.formate_data(step_data) return True @@ -118,6 +119,7 @@ class ClusterCommunicationDataSet(ClusterDataset): return round(dividend / divisor, 4) def _parse(self): + self.cluster_analyze() communication_json = self.load_json_data(const.CLUSTER_COMM_JSON) self.process(communication_json) return True diff --git a/profiler/advisor/dataset/profiling/__init__.py b/profiler/advisor/dataset/profiling/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/profiler/advisor/dataset/profiling/builder_base.py b/profiler/advisor/dataset/profiling/builder_base.py new file mode 100644 index 0000000000..2bfe14f946 --- /dev/null +++ b/profiler/advisor/dataset/profiling/builder_base.py @@ -0,0 +1,39 @@ +""" +profiling base +""" +import logging +from typing import Dict, List + +from profiler.advisor.dataset.profiling.profiling_parser import ProfilingParser +from profiler.advisor.utils.utils import join_prof_path + +logger = logging.getLogger() + + +class ProfilingBuilderBase: + """ + profiling base + """ + DATA_LIST: List[Dict] = [] + + def __init__(self, path) -> None: + self._path = path + + def parse_data(self) -> bool: + """ + parse data for file in data_dir + """ + if isinstance(self, ProfilingParser): + return True + ret = False + for data in self.DATA_LIST: + class_name = data.get("class_name") + if class_name is not None: + if data.get("subdir_name"): + data_class = data.get("class_name")(join_prof_path(self._path, data.get("subdir_name"))) + else: + data_class = data.get("class_name")(self._path) + if data_class.parse_data(): + setattr(self, str(data.get("attr_name")), data_class) + ret = True + return ret diff --git a/profiler/advisor/dataset/profiling/db_manager.py b/profiler/advisor/dataset/profiling/db_manager.py new file mode 100644 index 0000000000..c9fb73c7cf --- /dev/null +++ b/profiler/advisor/dataset/profiling/db_manager.py @@ -0,0 +1,70 @@ +""" +connection manager +""" +import os +import re +from typing import List + +from sqlalchemy import MetaData, create_engine + + +class ConnectionManager: + """ + Connection Manager + """ + + def __init__(self, path, db_name): + self.db_path = os.path.join(path, db_name) + self.connection = create_engine(f'sqlite:///{self.db_path}') + self.metadata = MetaData() + self.metadata.reflect(bind=self.connection) + + def __call__(self, *args, **kwargs): + return self.connection + + @staticmethod + def check_db_exists(db_path:str, dbs:List) -> bool: + """ + check db exists + """ + if not os.path.isdir(db_path): + return False + for prof_db in dbs: + if not os.access(db_path, os.R_OK) or prof_db not in os.listdir(db_path): + return False + return True + + def check_table_exists(self, tables:List) -> bool: + """ + check table exists + """ + for table in tables: + if table not in self.metadata.tables: + return False + return True + + def check_column_exists(self, table_name:str, columns:List) 
-> bool: + """ + check column exists + """ + if table_name not in self.metadata.tables: + return False + for column in columns: + if column not in self.metadata.tables[table_name].columns: + return False + return True + + @classmethod + def get_connection(cls, path, dbs, tables=None, is_host=False): + """ + get connection + """ + if is_host: + pattern = r"/device_[0-9]" + path = re.sub(pattern, "/host", path) + if not cls.check_db_exists(path, dbs): + return None + conn = cls(path, dbs) + if tables and not conn.check_table_exists(tables): + return None + return conn diff --git a/profiler/advisor/dataset/profiling/device_info.py b/profiler/advisor/dataset/profiling/device_info.py new file mode 100644 index 0000000000..b58930777f --- /dev/null +++ b/profiler/advisor/dataset/profiling/device_info.py @@ -0,0 +1,61 @@ +""" +profiling info +""" +import json +import logging + +from profiler.advisor.config.config import Config +from profiler.advisor.utils.utils import get_file_path_from_directory + +logger = logging.getLogger() + + +class DeviceInfoParser: + """ + profiling info + device_id device 名称信息 + "aiv_num" ai vector 个数 + "ai_core_num" aicore 个数 + """ + DATA_LIST = [] + + def __init__(self, path) -> None: + self._path = path + + def parse_data(self) -> bool: + """ + parse profiling data + :return: true for success or false + """ + file_list = get_file_path_from_directory(self._path, lambda x: x.startswith("info.json.")) + if not file_list: + return False + for info in file_list: + if self._parse(info): + return True + return False + + @staticmethod + def _parse(info_file: str) -> bool: + if info_file.endswith("done"): + return False # skip info.json.0.done + try: + with open(info_file, encoding="utf-8") as file: + info = json.load(file) + except (IOError, ValueError) as error: + logger.error("Parse json info file %s failed : %s", info_file, error) + return False + if "DeviceInfo" not in info: + logger.error("No device info in json info file %s", info_file) + return False + config = Config() + for device_info in info["DeviceInfo"]: + if "id" in device_info: + config.set_config("device_id", device_info["id"]) + if "aiv_num" in device_info: + config.set_config("aiv_num", device_info["aiv_num"]) + if "ai_core_num" in device_info: + config.set_config("ai_core_num", device_info["ai_core_num"]) + return True + logger.error("No ai_core_num in json info file %s", info_file) + return False diff --git a/profiler/advisor/dataset/profiling/info_collection.py b/profiler/advisor/dataset/profiling/info_collection.py new file mode 100644 index 0000000000..b1f84313bb --- /dev/null +++ b/profiler/advisor/dataset/profiling/info_collection.py @@ -0,0 +1,270 @@ +""" +profiling info +""" +import decimal +import logging + +from profiler.advisor.utils.utils import lazy_property + +logger = logging.getLogger() + + +class Info: + """ + op info + """ + _attr_pre_fix_list = [""] + + def add_attr(self, key: str, value: str): + """ + add attr to op info + :param key: op info key + :param value: op info value + :return: None + """ + if not key or hasattr(self, key): + return + setattr(self, key, value) + + def has_attr(self, key: str, strict_mode=False): + """ + check if op info has attr key + :param key: attr key + :return: true or false + """ + if strict_mode: + return hasattr(self, key) + for prefix in self._attr_pre_fix_list: + attr = prefix + key + if hasattr(self, attr): + return True + return False + + def get_attr(self, key, strict_mode=False): + """ + get attr value by key + :param key: attr key + :return: attr 
value + """ + if strict_mode: + if hasattr(self, key): + return getattr(self, key) + else: + for prefix in self._attr_pre_fix_list: + attr = prefix + key + if key.startswith("mac") and prefix == "aiv_": + # e.g mac_ratio must match aic_mac_ratio, not aiv_mac_ratio + continue + if key.startswith("vec") and prefix == "aic_": + # e.g vec_ratio must match aiv_vec_ratio, not aic_vec_ratio + continue + if hasattr(self, attr): + return getattr(self, attr) + return "" + + def get_float_attr(self, attr, strict_mode=False): + """ + get attr value by key + :param key: attr key + :return: attr value + """ + try: + return float((self.get_attr(attr, strict_mode))) + except (ValueError, FloatingPointError): + pass + return 0 + + def get_decimal_attr(self, attr, strict_mode=False): + """ + get attr value by key + :param key: attr key + :return: attr value + """ + try: + return decimal.Decimal((self.get_attr(attr, strict_mode))) + except (ValueError, decimal.InvalidOperation): + pass + return decimal.Decimal(0) + + def get_attrs(self) -> dict: + """ + get attr list + :return: attr list + """ + return self.__dict__ + + +class OpInfo(Info): + """ + summary info + """ + + _attr_pre_fix_list = ["", "aic_", "aiv_"] + _mac_ratio_attrs = ["mac_ratio", "mac_fp16_ratio", "mac_int8_ratio", "aic_mac_ratio"] + _aicore_time_key = ["aicore_time", "aiv_time"] + _total_cycles_key = ["total_cycles", "aic_total_cycles", "aiv_total_cycles"] + + def __lt__(self, other): + return self.get_float_attr("task_start_time") < other.get_float_attr("task_start_time") + + @lazy_property + def is_cube_op(self) -> bool: + """ + check type of operator if cube or not + """ + for attr in self._mac_ratio_attrs: + if hasattr(self, attr): + try: + if float(getattr(self, attr)) > 0: + if hasattr(self, "ffts_type") and getattr(self, "ffts_type") == "1": + logger.warning( + "ffts type of op %s is vector buf mac ratio is not 0", getattr(self, "op_name") + ) + return True + except ValueError: + pass + # not cube op + if hasattr(self, "ffts_type") and getattr(self, "ffts_type") == "0": + logger.warning("ffts type of op %s is cube but mac ratio is 0", getattr(self, "op_name")) + return False + + @lazy_property + def has_mac_ratio(self) -> bool: + """ + check if op_info has mac ratio + """ + for attr in self._mac_ratio_attrs: + if attr in self.__dict__: + return True + return False + + def attr_sum(self, attr_list): + """sum of a list attrs""" + total = 0 + for attr in attr_list: + total += self.get_float_attr(attr, strict_mode=True) + return total + + def get_aicore_time(self): + """ + get sum of aicore time and ai vector core time + """ + return self.attr_sum(self._aicore_time_key) + + def get_total_cycles(self): + """ + get sum of total cycle for aicore and ai vector core + """ + return self.attr_sum(self._total_cycles_key) + + +class TaskInfo: + """ + task info + """ + EVENT_TYPE = {"metadata": ['M'], "duration": ['B', 'E'], "complete": ['X'], 'flow': ['s', 't', 'f']} + + def __init__(self, content: dict) -> None: + self._name = content.get("name", "") + self._pid = content.get("pid", 0) + self._tid = content.get("tid", 0) + self._start_time = float(content.get("ts", 0.0)) + self._dur = float(content.get("dur", 0.0)) + self._args = content.get("args", {}) + self._cat = content.get("cat", "") + self._id = content.get("id", "") + + @property + def pk_id(self): + """ + get id + :return: id + """ + return self._id + + @property + def pid(self): + """ + get pid + :return: pid + """ + return self._pid + + @property + def tid(self): + """ + get tid + 
:return: tid + """ + return self._tid + + @property + def task_type(self): + """ + get pid + :return: pid + """ + return self._args.get("Task Type", "NA") + + @property + def start_time(self): + """ + get starttime + :return: starttime + """ + return self._start_time + + @property + def end_time(self): + """ + get endtime + :return: endtime + """ + return self._start_time + self._dur + + @property + def dur(self): + """ + get duration + :return: duration + """ + return self._dur + + @property + def name(self): + """ + get task name + :return: task name + """ + return self._name + + @property + def stream_id(self): + """ + get stream_id + :return: steram id + """ + return self._args.get("Stream Id", "NA") + + @property + def task_id(self): + """ + get task id + :return: task_id + """ + return self._args.get("Task Id", "NA") + + @property + def args(self): + """ + get args of task + :return: args + """ + return self._args + + @property + def cat(self): + """ + get category of task + """ + return self._cat diff --git a/profiler/advisor/dataset/profiling/profiling_dataset.py b/profiler/advisor/dataset/profiling/profiling_dataset.py new file mode 100644 index 0000000000..836f30905f --- /dev/null +++ b/profiler/advisor/dataset/profiling/profiling_dataset.py @@ -0,0 +1,76 @@ +import logging +import os + +import yaml +from profiler.advisor.common import constant +from profiler.advisor.common.profiling.ge_info import GeInfo +from profiler.advisor.common.profiling.msprof import Msprof +from profiler.advisor.common.profiling.op_summary import OpSummary +from profiler.advisor.common.profiling.tasktime import TaskTime +from profiler.advisor.dataset.dataset import Dataset +from profiler.advisor.dataset.profiling.device_info import DeviceInfoParser +from profiler.advisor.utils.utils import join_prof_path + + +logger = logging.getLogger() + + +class ProfilingDataset(Dataset): + PROF_TYPE = "" + + def __init__(self, collection_path, data: dict, **kwargs) -> None: + self.cann_version = kwargs.get("cann_version", constant.DEFAULT_CANN_VERSION) + self.PROF_TYPE = kwargs.get("profiling_type", constant.DEFAULT_PROFILING_TYPE) + self.patterns = self.parse_pattern() + self.current_version_pattern = self.get_current_version_pattern() + super().__init__(collection_path, data) + + def _parse(self): + info = DeviceInfoParser(self.collection_path) + if info.parse_data(): + self._info = info + ret = False + if self.current_version_pattern is not None: + self.build_from_pattern(self.current_version_pattern["dirs_pattern"], self.collection_path) + ret = True + + return ret + + def build_from_pattern(self, dirs_pattern, current_path): + if isinstance(dirs_pattern, dict): + for key, value in dirs_pattern.items(): + self.build_from_pattern(value, join_prof_path(current_path, key)) + elif isinstance(dirs_pattern, list): + for item in dirs_pattern: + data_class = globals()[self.current_version_pattern.get('class_attr').get(item)] + data_class.FILE_PATTERN = self.current_version_pattern.get('file_attr').get(item) + data_object = data_class(current_path) + data_object.parse_data() + setattr(self, item, data_object) + else: + logger.warning(f"Unsupported arguments : %s to build %s", dirs_pattern, self.__class__.__name__) + + def get_current_version_pattern(self): + for version_config_dict in self.patterns['versions']: + if version_config_dict['version'] == self.cann_version: + return version_config_dict + return dict() + + def parse_pattern(self, config_path="config/profiling_data_version_config.yaml"): + + if not 
os.path.isabs(config_path): + config_path = os.path.join(os.path.dirname(__file__), + "../", "../", config_path) + + if not os.path.exists(config_path): + logger.warning("Skip parse profiling dataset, because %s does not exist.", config_path) + return [] + + with open(config_path, 'r') as f: + patterns = yaml.safe_load(f) + + return patterns + + def collection_path(self): + """collection_path""" + return self.collection_path diff --git a/profiler/advisor/dataset/profiling/profiling_parser.py b/profiler/advisor/dataset/profiling/profiling_parser.py new file mode 100644 index 0000000000..bb4caeb29e --- /dev/null +++ b/profiler/advisor/dataset/profiling/profiling_parser.py @@ -0,0 +1,132 @@ +import csv +import json +import os +import re +from typing import List, Dict + +from profiler.advisor.dataset.profiling.info_collection import logger +from profiler.advisor.utils.utils import get_file_path_from_directory, SafeOpen, format_excel_title + + +class ProfilingParser: + """ + profiling + """ + FILE_PATTERN = "" + FILE_PATTERN_MSG = "" + FILE_INFO = "" + FILE_PATH = "" + + def __init__(self, path: str) -> None: + self._path = path + self._raw_data: List[List[str]] = [] + self._filename = "" + + @staticmethod + def file_match_func(pattern): + """file match function""" + return lambda x: re.search(re.compile(pattern), x) + + def parse_data(self) -> bool: + """ + pase task time file + :return: true or false + """ + if self._parse_from_file(): + return True + return False + + def _parse_from_file(self): + file_list = get_file_path_from_directory(self._path, self.file_match_func(self.FILE_PATTERN)) + if not file_list: + return False + ## get last file + file = file_list[-1] + self.FILE_PATH = file + if len(file_list) > 1: + logger.warning("Multiple copies of %s were found, use %s", self.FILE_INFO, file) + return self.parse_from_file(file) + + @staticmethod + def get_float(data) -> float: + """ + get float or 0.0 + """ + try: + return float(data) + except (FloatingPointError, ValueError): + return 0.0 + + def parse_from_file(self, file): + """ + parse from file + """ + return False + + @staticmethod + def _check_csv_file_format(csv_file_name: str, csv_content: List[List[str]]): + if not csv_content: + logger.error("%s is empty", csv_file_name) + return False + return True + + def _parse_csv(self, file, check_csv=True) -> bool: + logger.debug("Parse file %s", file) + self._filename = os.path.splitext(os.path.basename(file))[0] + with SafeOpen(file, encoding="utf-8") as csv_file: + try: + csv_content = csv.reader(csv_file) + for row in csv_content: + self._raw_data.append(row) + if check_csv and not self._check_csv_file_format(file, self._raw_data): + logger.error("Invalid csv file : %s", file) + return False + except OSError as error: + logger.error("Read csv file failed : %s", error) + return False + + if not csv_file: + return False + if not self._raw_data: + logger.warning("File %s has no content", file) + return False + return True + + def _parse_json(self, file) -> bool: + logger.debug("Parse file %s", file) + self._filename = os.path.splitext(os.path.basename(file))[0] + try: + with open(file, encoding="utf-8") as json_file: + self._raw_data = json.load(json_file) + except (OSError, ValueError) as error: + logger.error("Parse json file %s failed : %s", file, error) + return False + return True + + def get_raw_data(self): + """ + get raw file name and data + """ + return self._filename, self._raw_data + + @staticmethod + def _get_csv_title(data: List, number=0, title_index=0): + """ + number = 0 
replace (us) (ns).. + other replace " " to "_" + title_index: position of title default 0 + """ + title_dict: Dict[int, str] = {} + for idx, title in enumerate(data[title_index]): + if number == 0: + title_dict[idx] = format_excel_title(title) + else: + title_dict[idx] = title.replace(" ", "_") + return title_dict + + @property + def path(self): + """ + path + """ + return self._path diff --git a/profiler/advisor/display/html/templates/main.html b/profiler/advisor/display/html/templates/main.html index f1703c7d8c..251961d79d 100644 --- a/profiler/advisor/display/html/templates/main.html +++ b/profiler/advisor/display/html/templates/main.html @@ -72,7 +72,7 @@ table { width: 100%; - table-layout: auto; + table-layout: fixed; border-collapse: collapse; margin-top: 2px; margin-bottom: 5px; @@ -82,7 +82,7 @@ padding: 10px; word-wrap: break-word; word-break: break-all; - white-space: nowrap; + white-space: normal; border: 1px solid rgb(170, 169, 169); text-align: left; } @@ -140,7 +140,7 @@ {% for key, renders in render_list.items() %} {% if key == 'operator'%}
- Profiling Operator Issues
+ computation
{% for render in renders %} {{render|safe}} @@ -159,7 +159,7 @@ {% endif %} {% endfor %}
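
A minimal usage sketch of the ConnectionManager introduced in db_manager.py above (the profiling directory and the TaskInfo table name below are placeholders for illustration, not values defined by this patch; ge_info.db is only an example database file, and SQLAlchemy is assumed to be installed):

    from sqlalchemy import text

    from profiler.advisor.dataset.profiling.db_manager import ConnectionManager

    prof_dir = "/path/to/PROF_XXX/device_0"   # placeholder profiling output directory
    db_files = ["ge_info.db"]                 # sqlite files expected inside prof_dir

    # check_db_exists() verifies the directory is readable and contains every listed db file
    if ConnectionManager.check_db_exists(prof_dir, db_files):
        conn = ConnectionManager(prof_dir, db_files[0])
        # MetaData.reflect() in __init__ already loaded the schema, so the
        # existence check below is a plain dictionary lookup
        if conn.check_table_exists(["TaskInfo"]):   # placeholder table name
            engine = conn()                         # __call__ returns the SQLAlchemy engine
            with engine.connect() as connection:
                rows = connection.execute(text("SELECT * FROM TaskInfo LIMIT 10")).fetchall()
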
diff --git a/profiler/advisor/display/html/templates/operator_ai_cpu.html b/profiler/advisor/display/html/templates/operator_ai_cpu.html new file mode 100644 index 0000000000..b3235a8802 --- /dev/null +++ b/profiler/advisor/display/html/templates/operator_ai_cpu.html @@ -0,0 +1,61 @@ +
+

AICPU Issues

+
+ + + + + + + + + + + + + +
DescriptionSuggestionElapsed Time(us)Time Ratio
{{ format_result.record.optimization_item.description|safe }}{{ format_result.suggestion|safe }}{{ format_result.task_duration|safe }}{{ format_result.record.statistics_item.task_duration_ratio|safe }}
+
+ {% for op_type, op_info in format_result.statistic %} +
{{ op_type|safe }}
+
+ + + + + + + + + + + +
Operator TypeCountsElapsed Time(us)
{{ op_info.summary.op_type|safe }}{{ op_info.summary.counts|safe }}{{ op_info.summary.total_duration|safe }}
+
+ {% for trace_stack, info in op_info.op_info_list %} +
+ {{ info.summary.op_type|safe }} | Input DType:({{info.op_info_list[0].input_data_types|safe}}) | Output DType:({{info.op_info_list[0].output_data_types|safe}}) | Counts:{{ info.summary.counts|safe}} | Elapsed Time(us):{{ + info.summary.total_duration|safe}} +
+
+ {% if info.op_info_list[0].suggestions|length > 0 %} +
+ {% for suggestion in info.op_info_list[0].suggestions %} +

+ Suggestion {{ loop.index|safe }}: {{suggestion|safe}} +

+ {% endfor %} +
+ {% else %} +

Suggestion 1: Modify the code to avoid this AICPU operator

+ {% endif %} +
+ {{ info.op_info_list[0].stack_info|safe }} +
+ {% endfor %} +
+
+ {% endfor %} +
+
+
\ No newline at end of file diff --git a/profiler/advisor/display/html/templates/operator_block_dim.html b/profiler/advisor/display/html/templates/operator_block_dim.html new file mode 100644 index 0000000000..4e2c832f62 --- /dev/null +++ b/profiler/advisor/display/html/templates/operator_block_dim.html @@ -0,0 +1,38 @@ +
+

Block Dim Issues

+
+ + + + + + + + + + + + + +
DescriptionSuggestionElapsed Time(us)Time Ratio
{{ format_result.record.optimization_item.description|safe }}{{ format_result.suggestion|safe }}{{ format_result.task_duration|safe }}{{ format_result.record.statistics_item.task_duration_ratio|safe }}
+
+ {% for op_type, op_info in format_result.statistic %} +
{{ op_type|safe }}
+
+ + + + + + + + + + + +
Operator TypeCountsElapsed Time(us)
{{ op_info.summary.op_type|safe }}{{ op_info.summary.counts|safe }}{{ op_info.summary.total_duration|safe }}
+
+ {% endfor %} +
+
+
\ No newline at end of file diff --git a/profiler/advisor/display/html/templates/operator_dynamic_shape.html b/profiler/advisor/display/html/templates/operator_dynamic_shape.html new file mode 100644 index 0000000000..59920b6c9e --- /dev/null +++ b/profiler/advisor/display/html/templates/operator_dynamic_shape.html @@ -0,0 +1,15 @@ +
+

Operator Dynamic Shape Issues

+
+ + + + + + + + + +
DescriptionSuggestion
{{ format_result.record.optimization_item.description|safe }}{{ format_result.suggestion|safe }}
+
+
\ No newline at end of file diff --git a/profiler/advisor/display/html/templates/operator_no_bound.html b/profiler/advisor/display/html/templates/operator_no_bound.html new file mode 100644 index 0000000000..cfbd20baad --- /dev/null +++ b/profiler/advisor/display/html/templates/operator_no_bound.html @@ -0,0 +1,38 @@ +
+

Operator No Bound Issues

+
+ + + + + + + + + + + + + +
DescriptionSuggestionElapsed Time(us)Time Ratio
{{ format_result.record.optimization_item.description|safe }}{{ format_result.suggestion|safe }}{{ format_result.task_duration|safe }}{{ format_result.record.statistics_item.task_duration_ratio|safe }}
+
+ {% for op_type, op_info in format_result.statistic %} +
{{ op_type|safe }}
+
+ + + + + + + + + + + +
Operator TypeCountsElapsed Time(us)
{{ op_info.summary.op_type|safe }}{{ op_info.summary.counts|safe }}{{ op_info.summary.total_duration|safe }}
+
+ {% endfor %} +
+
+
\ No newline at end of file diff --git a/profiler/advisor/interface/interface.py b/profiler/advisor/interface/interface.py index c920ad4ea2..ebe20baa2d 100644 --- a/profiler/advisor/interface/interface.py +++ b/profiler/advisor/interface/interface.py @@ -1,6 +1,7 @@ from collections import OrderedDict import os +from profiler.advisor.analyzer.computation.profiling_analyzer import ProfilingAnalyzer from profiler.advisor.analyzer.schedule.fusion_ops.fusion_ops_analyzer import TimelineFusionOpsAnalyzer from profiler.advisor.common.analyzer_scopes import SupportedScopes from profiler.advisor.utils.utils import Timer @@ -13,12 +14,14 @@ class Interface: "schedule": OrderedDict({ SupportedScopes.TIMELINE_FUSION_OPS: TimelineFusionOpsAnalyzer }), - "computation": OrderedDict(), + "computation": OrderedDict({ + SupportedScopes.PORFILING_OPERATOR_ANALYSIS: ProfilingAnalyzer + }), "communication": OrderedDict(), "overall": OrderedDict(), "dataloader": OrderedDict(), "cluster": OrderedDict({ - SupportedScopes.SKOW_RANK: SlowRankAnalyzer, + SupportedScopes.SLOW_RANK: SlowRankAnalyzer, SupportedScopes.SLOW_LINK: SlowLinkAnalyzer }) } diff --git a/profiler/advisor/rules/aicpu_rules.yaml b/profiler/advisor/rules/aicpu_rules.yaml new file mode 100644 index 0000000000..053f4150e8 --- /dev/null +++ b/profiler/advisor/rules/aicpu_rules.yaml @@ -0,0 +1,107 @@ +DataTypeSuggeation: &DataTypeSuggeation "Data type {} in {} operator may cause AICPU issues, Try to convert to {} if possible." + +CommonChecker: + - DataTypeChecker: + cann_version: 7.0.RC1 + op_type: [ __ALL__ ] + ignore_type: [ cast, tensorequal, equal, nonzero, mul ] + input: [ float, float32, float16, bool, int32, uint32, int64, uint64, int8, uint8, int16, uint16, dt_bf16 ] + output: [ float, float32, float16, bool, int32, uint32, int64, uint64, int8, uint8, int16, uint16, dt_bf16 ] + suggestion: *DataTypeSuggeation + + - DataTypeChecker: + cann_version: 7.0.RC1 + op_type: [ cast ] + input: [ float, float32, float16, bool, int32, uint32, int64, uint64, uint8, dt_bf16 ] + output: [ float, float32, float16, bool, int32, uint32, int64, uint64, uint8, dt_bf16 ] + suggestion: *DataTypeSuggeation + + - DataTypeChecker: + cann_version: 7.0.RC1 + op_type: [ tensorequal ] + input: [ float, float32, float16, bool, int32, int8, uint8 ] + output: [ bool ] + suggestion: *DataTypeSuggeation + + - DataTypeChecker: + cann_version: 7.0.RC1 + op_type: [ equal ] + input: [ float, float32, float16, bool, int32, int64, int8, uint8 ] + output: [ bool ] + suggestion: *DataTypeSuggeation + + - DataTypeChecker: + cann_version: 7.0.RC1 + op_type: [ nonzero ] + input: [ float16, bool, dt_bf16 ] + output: [ int64 ] + suggestion: *DataTypeSuggeation + + - DataTypeChecker: + cann_version: 7.0.RC1 + op_type: [ mul ] + input: [ float, float32, float16, bool, int32, uint32, int64, uint64, int8, uint8, dt_bf16 ] + output: [ float, float32, float16, bool, int32, uint32, int64, uint64, int8, uint8, dt_bf16 ] + suggestion: *DataTypeSuggeation + + - DataTypeChecker: + cann_version: 7.0.0 + op_type: [ __ALL__ ] + ignore_type: [ cast, tensorequal, equal, nonzero, mul ] + input: [ float, float32, float16, dt_bf16, float64, bool, int32, int64, int8, uint8, int16, complex64, complex128 ] + output: [ float, float32, float16, dt_bf16, float64, bool, int32, int64, int8, uint8, int16, complex64, complex128 ] + suggestion: *DataTypeSuggeation + + - DataTypeChecker: + cann_version: 7.0.0 + op_type: [ cast ] + input: [ float, float32, float16, bool, int32, uint32, int64, uint64, uint8, dt_bf16 ] + 
output: [ float, float32, float16, bool, int32, uint32, int64, uint64, uint8, dt_bf16 ] + suggestion: *DataTypeSuggeation + + - DataTypeChecker: + cann_version: 7.0.0 + op_type: [ tensorequal ] + input: [ float, float32, float16, dt_bf16, float64, bool, int32, int8, uint8 ] + output: [ bool ] + suggestion: *DataTypeSuggeation + + - DataTypeChecker: + cann_version: 7.0.0 + op_type: [ equal ] + input: [ float, float32, float16, dt_bf16, float64, bool, int32, int64, int8, uint8 ] + output: [ bool ] + suggestion: *DataTypeSuggeation + + - DataTypeChecker: + cann_version: 7.0.0 + op_type: [ mul ] + input: [ float, float32, float16, dt_bf16, float64, bool, int32, int64, int8, uint8, complex64 ] + output: [ float, float32, float16, dt_bf16, float64, bool, int32, int64, int8, uint8, complex64 ] + suggestion: *DataTypeSuggeation + +ExampleGuideChecker: + - IndexPutChecker: + op_type: [index] + url: "https://wiki.huawei.com/domains/41510/wiki/76339/WIKI202311152358721?title=Index" + suggestion: 'Please modify source code followed by this LINK, try to replace index operator with equivalent operator.' + + - NonzeroChecker: + op_type: [ indexput, indexputv2 ] + url: "https://wiki.huawei.com/domains/41510/wiki/76339/WIKI202311152358721?title=IndexPut" + suggestion: 'Please modify source code followed by this LINK, try to replace indexput operator with equivalent operator.' + + - CastChecker: + op_type: [ argmin ] + url: "https://wiki.huawei.com/domains/41510/wiki/76339/WIKI202311152358721?title=ArgMin" + suggestion: 'Please update your cann-tookit to at least 7.0.RC1 version by this LINK.' + + - CastChecker: + op_type: [ unique ] + url: "https://wiki.huawei.com/domains/41510/wiki/76339/WIKI202311152358721?title=unique" + suggestion: 'Please modify source code followed by this LINK, try to replace unique operator with equivalent operator.' + + - CastChecker: + op_type: [ nonzero ] + url: "https://wiki.huawei.com/domains/41510/wiki/76339/WIKI202311152358721?title=unique" + suggestion: 'Please modify source code followed by this LINK, try to replace nonzero operator with equivalent operator.' \ No newline at end of file diff --git a/profiler/advisor/utils/utils.py b/profiler/advisor/utils/utils.py index 1a4444f1ec..f338fc7dd4 100644 --- a/profiler/advisor/utils/utils.py +++ b/profiler/advisor/utils/utils.py @@ -70,9 +70,9 @@ def singleton(cls): :param cls: any class :return: singleton handle - When using the singleton function, you need to manually specify arg='dataSet_path'. Otherwise, the singleton function + When using the singleton function, you need to manually specify collection_path='dataSet_path'. Otherwise, the singleton function is initialized by class name. - if cls has 'arg' property, _instance map will build by class_name and 'arg', the default value of + if cls has 'collection_path' property, _instance map will build by class_name and 'collection_path', the default value of collection path is class absolute path. 
_instance = {cls.name: {collection_path: instance}} @@ -277,7 +277,8 @@ def load_parameter(parameter, default): def get_supported_subclass(clazz: VersionControl.__class__, cann_version: str): """ - Returns a list of subclasses that support the specified version + Returns a list of subclasses that support the specified version, because of the __subclasses__(), + you need to import the all subclass first :param clazz: Class name which is extends to VersionControl.__class__ :param cann_version: The CANN software version :return: The list of subclasses that support the specified CANN version diff --git a/profiler/cli/entrance.py b/profiler/cli/entrance.py index d9b5b10da7..1164a78cd1 100644 --- a/profiler/cli/entrance.py +++ b/profiler/cli/entrance.py @@ -53,7 +53,7 @@ advisor_cli.add_command(compare_cli, name="compare") if __name__ == '__main__': advisor_cli.main( - ["analyze", "schedule", "-d", - r"/home/ma-user/work/profiling", + ["analyze", "all", "-d", + r"C:/xxx/profiling_data", ] ) diff --git a/profiler/test/tools/__init__.py b/profiler/test/tools/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/profiler/test/tools/tool.py b/profiler/test/tools/tool.py new file mode 100644 index 0000000000..ee4b6f9bb1 --- /dev/null +++ b/profiler/test/tools/tool.py @@ -0,0 +1,38 @@ +import os +import re +import shutil +import shlex +from subprocess import Popen, PIPE + + +def delete_file(pattern, work_path): + file_list = os.listdir(work_path) + for file_name in file_list: + if re.match(pattern, file_name): + + os.remove(os.path.join(work_path, file_name)) + + +def recover_env(work_path="./"): + if os.path.exists("./log"): + shutil.rmtree("./log") + + if os.path.exists("./tune_ops_file.cfg"): + os.remove("./tune_ops_file.cfg") + + delete_file(r"ma_advisor_+", work_path) + + +def run_command(cmd): + # Make sure the process output can be displayed on the console + p = Popen(shlex.split(cmd, posix=False), stdout=PIPE, bufsize=0, universal_newlines=False) + p.wait() + + +def init_env(): + test_dir = os.path.abspath(os.path.join(os.path.dirname(__file__), "../", "data", + "asight-0.3.52.dev0+ge3f3b41.d20231111-py3-none-any.whl")) + try: + import asight + except Exception: + run_command(f"pip install {test_dir}") diff --git a/profiler/test/ut/advisor/profiling/__init__.py b/profiler/test/ut/advisor/profiling/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/profiler/test/ut/advisor/profiling/test_profiling_analyzer.py b/profiler/test/ut/advisor/profiling/test_profiling_analyzer.py new file mode 100644 index 0000000000..e6c11f09db --- /dev/null +++ b/profiler/test/ut/advisor/profiling/test_profiling_analyzer.py @@ -0,0 +1,42 @@ +import os +import unittest + +from build.lib.profiler.advisor.analyzer.computation.profiling_analyzer import ProfilingAnalyzer +from profiler.advisor.common.constant import CANN_VERSION_C15 +from profiler.advisor.utils.utils import get_supported_subclass +from test.tools.tool import recover_env + + +class TestProfilingAnalyzer(unittest.TestCase): + @classmethod + def tearDownClass(cls) -> None: + recover_env() + + # def test_profiling_optimize_and_make_render(self): + # data_root_dir = os.path.join(os.path.dirname(os.path.dirname(os.path.realpath(__file__))), + # "data/profiling_result", "cann700cnnTest_0001_20240113084145.001_ascend_pt") + # kwargs = { + # "analysis_mode": "profiling", + # "data_dir": data_root_dir + # } + + # data_list = Collector().load(white_list=['timeline_event_dataset', 'profiling_dataset'], **kwargs) + # # 
获取所有ProfilingAnalyzerBase支持cann版本的子类 + # classes = get_supported_subclass(ProfilingAnalyzer, CANN_VERSION_C15) + + # results_list = [] + # for analyzer_clazz in classes: + # analyzer = analyzer_clazz(CANN_VERSION_C15) + # results_list.append(analyzer.optimize(data=data_list)) + + # # there has results_list and one problem dynamic shape here + # self.assertTrue(len(results_list) == 1) + + # if hasattr(analyzer, 'html_render'): + # analyzer.html_render.render_html() + # analyzer.html_render.save_to_file(f'ma_advisor_test.html') + + # self.assertTrue(hasattr(analyzer, 'html_render')) + + + diff --git a/profiler/test/ut/advisor/profiling/test_profiling_dataset.py b/profiler/test/ut/advisor/profiling/test_profiling_dataset.py new file mode 100644 index 0000000000..c312c1650e --- /dev/null +++ b/profiler/test/ut/advisor/profiling/test_profiling_dataset.py @@ -0,0 +1,46 @@ +import os +import unittest + +from profiler.advisor.config.config import Config +from profiler.advisor.common import constant +from test.tools.tool import recover_env + + +class TestProfilingDataset(unittest.TestCase): + + def setUp(self): + self.data_root_dir = os.path.join(os.path.dirname(os.path.dirname(os.path.realpath(__file__))), + "data/profiling_result", "cann700cnnTest_0001_20240113084145.001_ascend_pt") + kwargs = { + "analysis_mode": "profiling", + "data_dir": self.data_root_dir + } + self.data_list = Collector().load(white_list=['timeline_event_dataset', 'profiling_dataset'], **kwargs) + + @classmethod + def tearDownClass(cls) -> None: + recover_env() + + def test_profiling_dataset_build(self): + # check profiling data dir whether exists. + self.assertTrue(os.path.exists(self.data_root_dir)) + self.assertTrue(self.data_list.__contains__('profiling_dataset_base')) + + self.assertTrue(self.data_list['profiling_dataset_base'][0]) + # check ge_info.db in profiling data. + self.assertTrue(hasattr(self.data_list['profiling_dataset_base'][0], 'ge_info')) + self.assertTrue(len(self.data_list['profiling_dataset_base'][0].ge_info.op_state_info_list) > 0) + # check op_summary in profiling data. + self.assertTrue(hasattr(self.data_list['profiling_dataset_base'][0], 'op_summary')) + self.assertTrue(len(self.data_list['profiling_dataset_base'][0].op_summary.op_list) > 0) + # check task_time in profiling data. + self.assertTrue(hasattr(self.data_list['profiling_dataset_base'][0], 'task_time')) + self.assertTrue(len(self.data_list['profiling_dataset_base'][0].task_time._tasks) > 0) + # check msprof in profiling data. 
+ self.assertTrue(hasattr(self.data_list['profiling_dataset_base'][0], 'msprof')) + self.assertTrue(len(self.data_list['profiling_dataset_base'][0].msprof.tasks) > 0) + + def test_profiling_type(self): + # check profiling type + self.assertTrue(hasattr(self.data_list['profiling_dataset_base'][0], 'PROF_TYPE')) + self.assertTrue(self.data_list['profiling_dataset_base'][0].PROF_TYPE in constant.SUPPORTED_PROFILING_TYPE) diff --git a/profiler/test/ut/advisor/test_utils.py b/profiler/test/ut/advisor/test_utils.py new file mode 100644 index 0000000000..a99b2c2475 --- /dev/null +++ b/profiler/test/ut/advisor/test_utils.py @@ -0,0 +1,49 @@ +import os +import unittest + +from profiler.advisor.analyzer.computation.aicpu.aicpu_checker import AicpuChecker +from profiler.advisor.analyzer.computation.op_compile.dynamic_shape_checker import DynamicShapeChecker +from profiler.advisor.analyzer.computation.bound.block_dim_checker import BlockDimChecker +from profiler.advisor.analyzer.computation.bound.operator_bound_checker import OperatorBoundChecker +from profiler.advisor.analyzer.computation.operator_checker import OperatorChecker +import profiler.advisor.common.constant as constant +from profiler.advisor.utils.utils import get_supported_subclass, singleton + + +@singleton +class SingletonTest: + @property + def timeline_dir(self) : + return self._timeline_dir + + @property + def id(self): + return self._id + + def __init__(self, collection_path=None, **kwargs) -> None: + self._timeline_dir = collection_path + self._id = kwargs.get('id') + + +class TestProfilingAnalyzer(unittest.TestCase): + @classmethod + def test_get_supported_subclass(cls): + clazz = get_supported_subclass(OperatorChecker, constant.DEFAULT_CANN_VERSION) + assert clazz.__contains__(OperatorBoundChecker) + assert clazz.__contains__(AicpuChecker) + assert clazz.__contains__(DynamicShapeChecker) + assert clazz.__contains__(BlockDimChecker) + + @classmethod + def test_singleton(cls): + single1 = SingletonTest(collection_path="data_path_1", id='single1') + single2 = SingletonTest(collection_path="data_path_2", id='single2') + single3 = SingletonTest(collection_path="data_path_1", id='single3') + assert single1.id != single2.id + assert single1.id == single3.id + + + +if __name__ == '__main__': + TestProfilingAnalyzer.test_get_supported_subclass() + TestProfilingAnalyzer.test_singleton() \ No newline at end of file diff --git a/setup.py b/setup.py index 8ee18763b5..0c0306444b 100644 --- a/setup.py +++ b/setup.py @@ -16,7 +16,7 @@ extras = { with open('requirements/build.txt', 'r') as f: requires = f.read().splitlines() -with open('requirements/test.txt', 'r') as f: +with open('requirements/tests.txt', 'r') as f: tests_requires = f.read().splitlines() tests_requires.extend(set(requires)) @@ -24,7 +24,7 @@ with open('version.txt', 'r') as f: version = f.read().strip() setup( - name="att-advisor", + name="msprof-analyze", version=version, description="Ascend advisor tools", packages=find_packages(), @@ -35,7 +35,7 @@ setup( tests_require=tests_requires, entry_points=""" [console_scripts] - att-advisor=profiler.cli.entrance:advisor_cli + msprof-analyze=profiler.cli.entrance:advisor_cli """ ) -- Gitee From 33118278a0e2ae381a41aafb4f4ed8bcdcf84d53 Mon Sep 17 00:00:00 2001 From: fanxiaotong Date: Wed, 15 May 2024 11:03:05 +0800 Subject: [PATCH 12/21] framework --- .../computation/npu_fused/__init__.py | 14 + .../npu_fused/compute_advice_base.py | 118 +++++++++ .../computation/npu_fused/csv_analyzer.py | 81 ++++++ 
.../computation/npu_fused/json_analyzer.py | 55 ++++ .../computation/npu_fused/npu_fused_advice.py | 113 ++++++++ .../computation/npu_fused/npu_slow_advice.py | 124 +++++++++ .../analyzer/computation/npu_fused/op_perf.py | 193 ++++++++++++++ .../overall/overall_summary_analyzer.py | 246 ++++++++++++++++++ profiler/advisor/common/constant.py | 229 ++++++++++++++++ profiler/advisor/common/trace_view_json.py | 209 +++++++++++++++ .../advisor/common/trace_view_preprocessor.py | 208 +++++++++++++++ profiler/advisor/interface/interface.py | 5 +- profiler/cluster_analyse/cluster_analysis.py | 14 +- .../common_func/file_manager.py | 4 +- .../compute_advice/test_npu_slow_advice.py | 12 +- 15 files changed, 1608 insertions(+), 17 deletions(-) create mode 100644 profiler/advisor/analyzer/computation/npu_fused/__init__.py create mode 100644 profiler/advisor/analyzer/computation/npu_fused/compute_advice_base.py create mode 100644 profiler/advisor/analyzer/computation/npu_fused/csv_analyzer.py create mode 100644 profiler/advisor/analyzer/computation/npu_fused/json_analyzer.py create mode 100644 profiler/advisor/analyzer/computation/npu_fused/npu_fused_advice.py create mode 100644 profiler/advisor/analyzer/computation/npu_fused/npu_slow_advice.py create mode 100644 profiler/advisor/analyzer/computation/npu_fused/op_perf.py create mode 100644 profiler/advisor/analyzer/overall/overall_summary_analyzer.py create mode 100644 profiler/advisor/common/trace_view_json.py create mode 100644 profiler/advisor/common/trace_view_preprocessor.py diff --git a/profiler/advisor/analyzer/computation/npu_fused/__init__.py b/profiler/advisor/analyzer/computation/npu_fused/__init__.py new file mode 100644 index 0000000000..8400fd5ecd --- /dev/null +++ b/profiler/advisor/analyzer/computation/npu_fused/__init__.py @@ -0,0 +1,14 @@ +# Copyright (c) 2023, Huawei Technologies Co., Ltd. +# All rights reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. diff --git a/profiler/advisor/analyzer/computation/npu_fused/compute_advice_base.py b/profiler/advisor/analyzer/computation/npu_fused/compute_advice_base.py new file mode 100644 index 0000000000..3916de201b --- /dev/null +++ b/profiler/advisor/analyzer/computation/npu_fused/compute_advice_base.py @@ -0,0 +1,118 @@ +# Copyright (c) 2023, Huawei Technologies Co., Ltd. +# All rights reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +from abc import abstractmethod +from collections import defaultdict +import os + +from profiler.cluster_analyse.common_func.file_manager import FileManager +from profiler.advisor.analyzer.base_analyzer import BaseAnalyzer +from profiler.advisor.common import constant as const + + +class ComputeAdviceBase(BaseAnalyzer): + ASCEND_PT = 'ascend_pt' + ASCEND_PROFILER_OUTPUT = 'ASCEND_PROFILER_OUTPUT' + KERNEL_DETAIL_FILE = "kernel_details.csv" + TRACE_VIEW_FILE = "trace_view.json" + + def __init__(self, collection_path: str, n_processes: int = 1, cann_version=const.DEFAULT_CANN_VERSION, + torch_version=const.DEFAULT_TORCH_VERSION, **kwargs): + super().__init__(collection_path, n_processes, cann_version, torch_version, **kwargs) + self.kernel_details_path = "" + self.has_preparse = False + self.preparse_data = defaultdict(list) + self.call_stack = None + self.trace_view_path = "" + + def path_check(self): + """ + check whether input path is valid + """ + if not os.path.exists(self.collection_path): + print("[ERROR] Path: {} is not exist.".format(self.collection_path)) + return False + if os.path.isdir(self.collection_path) and self.collection_path.endswith("ascend_pt"): + self.kernel_details_path = os.path.join(self.collection_path, "ASCEND_PROFILER_OUTPUT", + "kernel_details.csv") + if not os.path.exists(self.kernel_details_path): + print("[ERROR] kernel_details.csv is not exist in the Path: {}.".format( + os.path.join(self.collection_path, "ASCEND_PROFILER_OUTPUT"))) + return False + elif os.path.isfile(self.collection_path) and os.path.basename(self.collection_path) == "kernel_details.csv": + self.kernel_details_path = self.collection_path + else: + print("[ERROR] Please input ascend_pt or kernel_details.csv") + return False + print("[INFO] Start to analyse the target file: {}".format(self.kernel_details_path)) + self.preparse() + return True + + def has_callstack(self): + if self.call_stack is not None: + return self.call_stack + profiler_info_json_path = "" + for file in os.listdir(self.collection_path): + if file.startswith("profiler_info"): + profiler_info_json_path = os.path.join(self.collection_path, file) + break + if not profiler_info_json_path: + self.call_stack = False + return self.call_stack + self.trace_view_path = os.path.join(self.collection_path, self.ASCEND_PROFILER_OUTPUT, "trace_view.json") + if not os.path.exists(profiler_info_json_path) or not os.path.exists(self.trace_view_path): + self.call_stack = False + return self.call_stack + info = FileManager.read_json_file(profiler_info_json_path) + if not info.get("config") or not info.get("config").get("common_config") \ + or not info.get("config").get("common_config").get("with_stack"): + self.call_stack = False + return self.call_stack + activities = info.get("config").get("common_config").get("activities") + if not activities or "ProfilerActivity.CPU" not in activities: + self.call_stack = False + return self.call_stack + self.call_stack = info.get("config").get("common_config").get("with_stack") + return self.call_stack + + @abstractmethod + def run(self): + """ + analyze profiling data and advice + """ + + def output(self): + """ + output relevant data + """ + self.output_format_data[self.DATA] = self.cur_data + self.output_format_data[self.BOTTLENECK] = self.cur_bottleneck + self.output_format_data[self.ADVICE] = self.cur_advice + + def preparse(self): + if self.has_preparse: + return + + def optimize(self): + pass + + def make_record(self): + """ + make record for what and how to optimize + """ + pass + + def 
make_render(self): + pass diff --git a/profiler/advisor/analyzer/computation/npu_fused/csv_analyzer.py b/profiler/advisor/analyzer/computation/npu_fused/csv_analyzer.py new file mode 100644 index 0000000000..c85c14d618 --- /dev/null +++ b/profiler/advisor/analyzer/computation/npu_fused/csv_analyzer.py @@ -0,0 +1,81 @@ +# Copyright (c) 2023, Huawei Technologies Co., Ltd. +# All rights reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import multiprocessing + +import pandas as pd +import numpy as np + +from common_func_advisor.constant import Constant +from .op_perf import OpPerfFactory + + +class CSVAnalyzer: + def __init__(self, path) -> None: + self._path = path + + def process(self): + df = pd.read_csv(self._path, dtype={"Start Time(us)": str}) + # 分析是否存在可融合的算子 + op_type_list = df["Type"].tolist() + duration_list = df["Duration(us)"].tolist() + start_times = df["Start Time(us)"].tolist() + # 去除末尾的\t分隔符 + start_times = [start_time[:-1] for start_time in start_times] + result_list = [] + for pattern in Constant.PATTERN_DICT.keys(): + result_list.extend(self.find_all_sub_lists(op_type_list, duration_list, start_times, pattern)) + data_frame = pd.DataFrame(result_list) + data_frame.columns = ["pattern_name", "pattern", "len", "count", "duration sum(us)", "op durations(us)", + "index", "first_timestamp"] + return data_frame + + @staticmethod + def find_all_sub_lists(op_type_list, duration_list, start_times, expect_sub_list): + # 创建一个空字典,用来存储子列表和它们的出现次数和起始位置 + len_sub_list = len(expect_sub_list) + expect_sub_list = tuple(expect_sub_list) + sublist_dict = {} + # 遍历列表,从每个位置开始,取长度为N的子列表 + for i in range(len(op_type_list) - len_sub_list + 1): + sublist = tuple(op_type_list[i:i + len_sub_list]) + if sublist != expect_sub_list: + continue + # 如果子列表已经在字典中,就增加它的出现次数,否则就初始化为1 + if sublist in sublist_dict: + # count + sublist_dict[sublist][0] += 1 + # index + sublist_dict[sublist][1].append(i) + # total duration + sublist_dict[sublist][2] += sum(duration_list[i:i + len_sub_list]) + # duration + zip_data = zip(sublist_dict[sublist][3], duration_list[i:i + len_sub_list]) + sublist_dict[sublist][3] = [a + b for a, b in zip_data] + else: + sublist_dict[sublist] = [1, [i], sum(duration_list[i:i + len_sub_list]), + duration_list[i:i + len_sub_list], len_sub_list, start_times[i]] + # 创建一个空列表,用来存储所有重复的子列表 + repeated_sublists = [] + for sublist, (count, index, duration_sum, op_durations, sublist_len, first_time) in sublist_dict.items(): + pattern_name = Constant.PATTERN_DICT.get(sublist, "unknown") + op_durations = [round(num, 2) for num in op_durations] + repeated_sublists.append([pattern_name, sublist, sublist_len, count, + duration_sum, op_durations, index, first_time]) + if len(sublist_dict) == 0: + pattern_name = Constant.PATTERN_DICT.get(expect_sub_list, "unknown") + repeated_sublists.append([pattern_name, expect_sub_list, 0, 0, 0, 0, 0, 0]) + # 返回所有重复的子列表 + return repeated_sublists diff --git a/profiler/advisor/analyzer/computation/npu_fused/json_analyzer.py 
b/profiler/advisor/analyzer/computation/npu_fused/json_analyzer.py new file mode 100644 index 0000000000..fd2a72ffa3 --- /dev/null +++ b/profiler/advisor/analyzer/computation/npu_fused/json_analyzer.py @@ -0,0 +1,55 @@ +# Copyright (c) 2024, Huawei Technologies Co., Ltd. +# All rights reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import pandas as pd + +from common_func_advisor.trace_view_json import TraceViewJson + + +class JSONAnalyzer(object): + def __init__(self, path): + self._path = path + + def get_custom_code(self, data: pd.DataFrame, ts_col: str, output_col: str): + trace_json = TraceViewJson(self._path) + callstacks = pd.DataFrame(columns=[output_col]) + + for i, row in data.iterrows(): + if ts_col not in data.columns.tolist(): + print("[ERROR] No {} col found in data columns.".format(ts_col)) + return callstacks + timestamp = row[ts_col] + flow_event = trace_json.get_torch_2_npu_flow_event(timestamp) + if not flow_event.valid(): + print("[ERROR] Get flow event failed for pattern {}.".format(row['pattern'])) + callstacks.loc[i] = "" + continue + flow_event_s_key = flow_event.s_point_ts + python_dur_events = trace_json.get_python_dur_events_contain_ts(flow_event_s_key) + if not python_dur_events: + print("[ERROR] No python dur event found for pattern {}.".format(row['pattern'])) + callstacks.loc[i] = "" + continue + # 保持新老版本callstack兼容性 + if python_dur_events[0].args.get("Call stack"): + # 旧版本 + callstack = python_dur_events[0].args.get("Call stack").split(";") + else: + python_dur_events.sort(key=lambda e: e.ts) + # 新版本 + callstack = [event.name for event in python_dur_events if event.cat == "python_function"] + callstack_str = "\n".join(callstack) + callstacks.loc[i] = callstack_str + return callstacks diff --git a/profiler/advisor/analyzer/computation/npu_fused/npu_fused_advice.py b/profiler/advisor/analyzer/computation/npu_fused/npu_fused_advice.py new file mode 100644 index 0000000000..7ec711d800 --- /dev/null +++ b/profiler/advisor/analyzer/computation/npu_fused/npu_fused_advice.py @@ -0,0 +1,113 @@ +# Copyright (c) 2023, Huawei Technologies Co., Ltd. +# All rights reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +import os +from abc import ABC + +import pandas as pd + +from profiler.advisor.analyzer.computation.npu_fused.compute_advice_base import ComputeAdviceBase +from profiler.advisor.analyzer.computation.npu_fused.csv_analyzer import CSVAnalyzer +from profiler.advisor.analyzer.computation.npu_fused.json_analyzer import JSONAnalyzer +from profiler.advisor.common import constant as const +from profiler.advisor.result.item import OptimizeItem, OptimizeRecord + + +class NpuFusedAdvice(ComputeAdviceBase, ABC, ): + NPU_FUSED_ADVICE = "npu_fused_advice" + + def __init__(self, collection_path: str, n_processes: int = 1, cann_version=const.DEFAULT_CANN_VERSION, + torch_version=const.DEFAULT_TORCH_VERSION, **kwargs): + super().__init__(collection_path, n_processes, cann_version, torch_version, **kwargs) + self.cur_data = dict() + self.cur_bottleneck = str() + self.cur_advice = str() + self.kernel_details_path = "" + self.call_stack = None + + def run(self): + if not self.path_check(): + return self.output_format_data + self.process() + self.output() + return self.output_format_data + + def process(self): + csv_analyzer = CSVAnalyzer(self.kernel_details_path) + all_pattern_data = csv_analyzer.process() + all_pattern_data = all_pattern_data.sort_values(by='duration sum(us)', ascending=False) + filter_data = all_pattern_data.get(all_pattern_data.get("duration sum(us)", 0) > 0) + if not self.has_callstack(): + print("[Warning] No call stack info found, advice will be incomplete") + self.cur_data = filter_data + else: + json_analyzer = JSONAnalyzer(self.trace_view_path) + custom_code = json_analyzer.get_custom_code(filter_data, "first_timestamp", "custom code") + self.cur_data = pd.concat([filter_data, custom_code], axis=1) + op_num = len(self.cur_data.index) + op_dur = filter_data["duration sum(us)"].sum() + if op_num > 0: + index = 0 + self.cur_bottleneck = f"The computing time of fusable op is {round(op_dur, 2)} ms." + self.cur_advice = "" + for _, row in self.cur_data.iterrows(): + advice = f"Advice {index}:\n" + cur_op = "[" + ", ".join(row.loc["pattern"]) + "]" + npu_fused_op = row.loc["pattern_name"] + advice += f"Replace {cur_op} with {npu_fused_op}. 
" + if self.call_stack: + advice += f"This pattern first happened in: \n{row['custom code']}" + if index != op_num - 1: + advice += "\n" + index += 1 + self.cur_advice += advice + + def optimize(self): + if not self.path_check(): + return self.output_format_data + self.process() + self.output() + return self.output_format_data + + def make_record(self): + """ + make record for what and how to optimize + """ + optimization_item = OptimizeItem( + NpuFusedAdvice.NPU_FUSED_ADVICE, + self.bottleneck_str, + self.cur_advices + ) + self.result.add(OptimizeRecord(optimization_item)) + + # self.result.add_detail(const.BOTTLENECK, self.bottleneck_table["headers"], self.bottleneck_table["data"][0]) + # for data_type, data in self.cur_data.items(): + # if data: + # self.result.add_detail(const.DATA + data_type, self.cur_data_table[data_type]["headers"], self.cur_data_table[data_type]["data"][0]) + + def make_render(self): + result_for_html = { + "Description" : self.cur_bottleneck, + "suggestion" : self.cur_advice, + "details" : [{}] + } + + self.html_render.render_template(key="cluster", + title=NpuFusedAdvice.NPU_FUSED_ADVICE, + template_dir="templates", + template_name="cluster_analysis.html", + cann_version=self.cann_version, + torch_version=self.torch_version, + result=result_for_html) diff --git a/profiler/advisor/analyzer/computation/npu_fused/npu_slow_advice.py b/profiler/advisor/analyzer/computation/npu_fused/npu_slow_advice.py new file mode 100644 index 0000000000..0dd0a3225f --- /dev/null +++ b/profiler/advisor/analyzer/computation/npu_fused/npu_slow_advice.py @@ -0,0 +1,124 @@ +# Copyright (c) 2023, Huawei Technologies Co., Ltd. +# All rights reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+from abc import ABC +import multiprocessing + +import pandas as pd + +from profiler.advisor.analyzer.computation.npu_fused.compute_advice_base import ComputeAdviceBase +from profiler.advisor.analyzer.computation.npu_fused.op_perf import OpPerfFactory +from profiler.advisor.common import constant as const +from profiler.advisor.common.constant import PerfColor +from profiler.advisor.common.trace_view_json import TraceViewJson +from profiler.advisor.common.constant import Constant +from profiler.advisor.display.html.render import HTMLRender +from profiler.advisor.result.result import OptimizeResult +from profiler.advisor.result.item import OptimizeItem, OptimizeRecord + + +class NpuSlowAnalyzer(ComputeAdviceBase, ABC): + OP_PERF_SHEET = "op_perf" + npu_slow_advice = "NPU_SLOW_ADVICE" + + def __init__(self, collection_path: str, n_processes: int = 1, cann_version=const.DEFAULT_CANN_VERSION, + torch_version=const.DEFAULT_TORCH_VERSION, **kwargs): + super().__init__(collection_path, n_processes, cann_version, torch_version, **kwargs) + self.kernel_details_path = "" + self.data = pd.DataFrame() + + @staticmethod + def save_to_excel(data: pd.DataFrame, file_path: str) -> None: + writer = pd.ExcelWriter(file_path, engine="xlsxwriter", mode="w") + data.index.name = Constant.TITLE.INDEX + data.to_excel(writer, index=True, sheet_name=NpuSlowAnalyzer.OP_PERF_SHEET) + NpuSlowAnalyzer.color_sheet(data, writer.book, writer.sheets[NpuSlowAnalyzer.OP_PERF_SHEET]) + writer.sheets[NpuSlowAnalyzer.OP_PERF_SHEET].freeze_panes = "A2" + writer.close() + + @staticmethod + def color_sheet(data: pd.DataFrame, workbook, worksheet): + color_rgb = { + PerfColor.GREEN.name: workbook.add_format({'bg_color': '#C6EFCE'}), + PerfColor.YELLOW.name: workbook.add_format({'bg_color': '#FFEB9C'}), + PerfColor.RED.name: workbook.add_format({'bg_color': '#FFC7CE'}), + } + for row in data.iterrows(): + color = row[1][Constant.TITLE.COLOR] + fill_format = color_rgb.get(color) + if not fill_format: + continue + worksheet.set_row(row[0] + 1, None, fill_format) + + @staticmethod + def update_op_row(row: tuple): + return OpPerfFactory.build(row[1]).update() + + def get_call_stack(self, data: pd.DataFrame, index_id: int, ts_col: str) -> str: + if not self.has_callstack(): + print("There is no call stack info, please set 'with_stack=True'") + return "" + trace_json = TraceViewJson(self.trace_view_path) + return trace_json.get_call_stack(data, index_id, ts_col) + + def run(self): + if not self.path_check(): + return self.data + self.process() + return self.data + + def process(self): + self.data = pd.read_csv(self.kernel_details_path, dtype={"Start Time(us)": str}) + # 去除末尾的\t分隔符 + self.data["Start Time(us)"] = self.data["Start Time(us)"].apply(lambda x: x[:-1]) + pool = multiprocessing.Pool(multiprocessing.cpu_count()) + result = pool.map(self.update_op_row, self.data.iterrows()) + pool.close() + self.data = pd.DataFrame(result) + + def optimize(self): + if not self.path_check(): + return self.data + self.process() + + def make_record(self): + """ + make record for what and how to optimize + """ + optimization_item = OptimizeItem( + NpuSlowAnalyzer.npu_slow_advice, + self.bottleneck_str, + self.cur_advices + ) + self.result.add(OptimizeRecord(optimization_item)) + + # self.result.add_detail(const.BOTTLENECK, self.bottleneck_table["headers"], self.bottleneck_table["data"][0]) + # for data_type, data in self.cur_data.items(): + # if data: + # self.result.add_detail(const.DATA + data_type, self.cur_data_table[data_type]["headers"], 
self.cur_data_table[data_type]["data"][0]) + + def make_render(self): + result_for_html = { + "Description" : self.cur_bottleneck, + "suggestion" : self.cur_advice, + "details" : [{}] + } + + self.html_render.render_template(key="cluster", + title=NpuSlowAnalyzer.npu_slow_advice, + template_dir="templates", + template_name="cluster_analysis.html", + cann_version=self.cann_version, + torch_version=self.torch_version, + result=result_for_html) diff --git a/profiler/advisor/analyzer/computation/npu_fused/op_perf.py b/profiler/advisor/analyzer/computation/npu_fused/op_perf.py new file mode 100644 index 0000000000..dbcaad8c1c --- /dev/null +++ b/profiler/advisor/analyzer/computation/npu_fused/op_perf.py @@ -0,0 +1,193 @@ +# Copyright (c) 2023, Huawei Technologies Co., Ltd. +# All rights reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +import functools +from typing import Dict +from profiler.advisor.common.constant import Constant, CoreType, PerfColor + + +class OpPerfFactory: + @classmethod + def build(cls, op_row: Dict): + if op_row.get(Constant.TITLE.TASK_TYPE) == CoreType.AIV: + return VecOpPerf(op_row) + elif op_row.get(Constant.TITLE.TASK_TYPE) == CoreType.AIC: + return CubeOpPerf(op_row) + else: + return OpPerf(op_row) + + +class OpPerf: + def __init__(self, op_row: Dict): + if "OP Type" in op_row.keys(): + Constant.update_title() + self.row = op_row + self.model_name = op_row.get("Model Name") + self.model_id = op_row.get("Model ID") + self.task_id = op_row.get("Task ID") + self.stream_id = op_row.get("Stream ID") + self.infer_id = op_row.get("Infer ID") + self.op_name = op_row.get("Name") + self.op_type = op_row.get("Type") + self.task_type = op_row.get("Accelerator Core") + self.task_start_time = op_row.get("Start Time(us)") + self.task_duration = op_row.get("Duration(us)") + self.task_wait_time = op_row.get("Wait Time(us)") + self.block_dim = op_row.get("Block Dim") + self.mix_block_dim = op_row.get("Mix Block Dim") + + self.hf32_eligible = op_row.get("HF32 Eligible") + self.input_shapes = op_row.get("Input Shapes") + self.input_data_types = op_row.get("Input Data Types") + self.input_formats = op_row.get("Input Formats") + self.output_shapes = op_row.get("Output Shapes") + self.output_data_types = op_row.get("Output Data Types") + self.output_formats = op_row.get("Output Formats") + self.context_id = op_row.get("Context ID") + self.aicore_time = op_row.get("aicore_time(us)") + self.aic_total_cycles = op_row.get("aic_total_cycles") + + self.aic_mac_time = op_row.get("aic_mac_time(us)") + self.aic_mac_ratio = op_row.get("aic_mac_ratio") + self.aic_scalar_time = op_row.get("aic_scalar_time(us)") + self.aic_scalar_ratio = op_row.get("aic_scalar_ratio") + self.aic_mte1_time = op_row.get("aic_mte1_time(us)") + self.aic_mte1_ratio = op_row.get("aic_mte1_ratio") + self.aic_mte2_time = op_row.get("aic_mte2_time(us)") + self.aic_mte2_ratio = op_row.get("aic_mte2_ratio") + self.aic_fixpipe_time = op_row.get("aic_fixpipe_time(us)") + self.aic_fixpipe_ratio = op_row.get("aic_fixpipe_ratio") 
+ self.aic_icache_miss_rate = op_row.get("aic_icache_miss_rate") + self.aiv_time = op_row.get("aiv_time(us)") + self.aiv_total_cycles = op_row.get("aiv_total_cycles") + self.aiv_vec_time = op_row.get("aiv_vec_time(us)") + self.aiv_vec_ratio = op_row.get("aiv_vec_ratio") + self.aiv_scalar_time = op_row.get("aiv_scalar_time(us)") + self.aiv_scalar_ratio = op_row.get("aiv_scalar_ratio") + self.aiv_mte2_time = op_row.get("aiv_mte2_time(us)") + + self.aiv_mte2_ratio = op_row.get("aiv_mte2_ratio") + self.aiv_mte3_time = op_row.get("aiv_mte3_time(us)") + self.aiv_mte3_ratio = op_row.get("aiv_mte3_ratio") + self.aiv_icache_miss_rate = op_row.get("aiv_icache_miss_rate") + self.cube_utilization = op_row.get("cube_utilization( %)") + + @staticmethod + def get_dtype_size(dtype_str: str): + return Constant.DTYPE_SIZE_MAP.get(dtype_str.lower(), 0) + + @staticmethod + def get_element_count(shape: list): + return functools.reduce(lambda x, y: int(x) * int(y), shape) + + @staticmethod + def shape_to_tuple(shape_str: str) -> tuple: + if not isinstance(shape_str, str): + return [] + shape_str = shape_str.strip('"') + split_shape = shape_str.strip(';') + if not split_shape: + return [] + pairs = split_shape.split(';') + shape_result = [] + for pair in pairs: + pair = pair.strip(";") + elements = pair.split(',') + elements = tuple(int(element) if "" != element else 0 for element in elements) + shape_result.append(elements) + return tuple(shape_result) + + @staticmethod + def dtype_to_tuple(dtypes_str: str) -> tuple: + if not isinstance(dtypes_str, str): + return [] + dtypes_str = dtypes_str.strip('"') + split_dtypes = dtypes_str.strip(';') + if not split_dtypes: + return [] + pairs = split_dtypes.split(';') + return tuple(pairs) + + def get_mac_ratio(self): + return self.aic_mac_ratio + + def get_size(self, shapes_str, dtypes_str): + shapes = self.shape_to_tuple(shapes_str) + dtypes = self.dtype_to_tuple(dtypes_str) + if len(shapes) > len(dtypes): + print(f"[ERROR] The size of shape is greater than that of dtypes.") + return 0 + if len(shapes) < len(dtypes): + shapes = list(shapes) + shapes.extend([(1,)] * (len(dtypes) - len(shapes))) + all_size = 0 + for index, shape in enumerate(shapes): + element_count = self.get_element_count(shape) + dtype_size = self.get_dtype_size(dtypes[index]) + all_size += element_count * dtype_size + return all_size + + def get_calc_size(self): + # input and output bytes (MB) + if not self.input_shapes or not self.output_shapes: + print("[ERROR] There is no tensor data, do not assess vector op performance.") + return 0 + intput_size = self.get_size(self.input_shapes, self.input_data_types) + output_size = self.get_size(self.output_shapes, self.output_data_types) + return (intput_size + output_size) / (Constant.BYTE_UNIT_TRANS * Constant.BYTE_UNIT_TRANS) + + def get_throughput(self): + # throughput(GB/s) + if not self.task_duration or abs(self.task_duration) < 1e-6: + print("[ERROR] There is no task_duration, do not assess vector op performance.") + return 0 + return self.row[Constant.TITLE.SIZE] / Constant.BYTE_UNIT_TRANS / self.task_duration * Constant.UNIT_TRANS * Constant.UNIT_TRANS + + def get_perf_color(self): + return PerfColor.WHITE + + def update(self): + self.row[Constant.TITLE.SIZE] = self.get_calc_size() + self.row[Constant.TITLE.THROUGHPUT] = self.get_throughput() + self.row[Constant.TITLE.COLOR] = self.get_perf_color().name + return self.row + + +class VecOpPerf(OpPerf): + def get_perf_color(self) -> PerfColor: + throughput = self.row[Constant.TITLE.THROUGHPUT] + 
op_duration = self.task_duration + tp_threshold = Constant.TP_THRESHOLD + if throughput == 0: + return PerfColor.WHITE + if throughput < tp_threshold / 2 and op_duration > 20: + return PerfColor.RED + elif tp_threshold / 2 <= throughput < tp_threshold: + return PerfColor.YELLOW + else: + return PerfColor.GREEN + + +class CubeOpPerf(OpPerf): + def get_perf_color(self) -> PerfColor: + aic_mac_ratio = self.get_mac_ratio() + if not aic_mac_ratio: + print("[WARNING] There is no aic_mac_ratio, do not assess cube op performance.") + return PerfColor.WHITE + elif aic_mac_ratio < 0.6: + return PerfColor.RED + elif 0.6 <= aic_mac_ratio < 0.8: + return PerfColor.YELLOW + else: + return PerfColor.GREEN diff --git a/profiler/advisor/analyzer/overall/overall_summary_analyzer.py b/profiler/advisor/analyzer/overall/overall_summary_analyzer.py new file mode 100644 index 0000000000..f46bb2c1d5 --- /dev/null +++ b/profiler/advisor/analyzer/overall/overall_summary_analyzer.py @@ -0,0 +1,246 @@ +# Copyright (c) 2024, Huawei Technologies Co., Ltd. +# All rights reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +import os + +import logging +from typing import Dict, List + +from profiler.advisor.display.html.render import HTMLRender +from profiler.advisor.result.result import OptimizeResult +from profiler.advisor.result.item import OptimizeItem, OptimizeRecord +from profiler.advisor.analyzer.base_analyzer import BaseAnalyzer +from profiler.compare_tools.compare_backend.utils.constant import Constant +from profiler.advisor.common import constant as const +from profiler.compare_tools.compare_interface.comparison_interface import ComparisonInterface + + +class OverallSummaryAnalyzer(BaseAnalyzer): + OVERALL_SUMMARY_ANALYZER = "overall_summary_analysis" + advice_map = { + "Computing Time": "if you want more detailed advice please go to compute_perf_analysis.ipynb.", + "Uncovered Communication Time": "if you want more detailed advice please go to cluster_perf_analysis.ipynb.", + "Free Time": "if you want more detailed advice please go to timeline_perf_analysis.ipynb." 
+ } + time_name_map = { + "Computing Time": "computing", + "Uncovered Communication Time(Wait Time)": "communication", + "Free Time": "free", + 'Cube Time(Num)': 'Cube Time', + 'Vector Time(Num)': 'Vector Time', + 'Flash Attention Time(Forward)(Num)': 'Flash Attention Time(Forward)', + 'Flash Attention Time(Backward)(Num)': 'Flash Attention Time(Backward)', + 'Other Time': "Other Computing Time", + 'SDMA Time(Num)': 'SDMA Time' + } + performance_time_dict = { + "Computing Time": ['Cube Time(Num)', 'Vector Time(Num)', 'Flash Attention Time(Forward)(Num)', + 'Flash Attention Time(Backward)(Num)', 'Other Time'], + "Uncovered Communication Time(Wait Time)": [], + "Free Time": ['SDMA Time(Num)'] + } + + def __init__(self, collection_path: str, n_processes: int = 1, cann_version=const.DEFAULT_CANN_VERSION, + torch_version=const.DEFAULT_TORCH_VERSION, **kwargs): + super().__init__(collection_path, n_processes, cann_version, torch_version, **kwargs) + self.base_collection_path = kwargs.get("base_collection_path", "") + self._has_base_collection = False + self._is_minimal_profiling = False + self.cur_data = {} + self.cur_data_table = {} + self.cur_bottleneck = {} + self.cur_advices = "" + self._headers = [] + self._base_data = [] + self._comparison_data = [] + self.html_render = HTMLRender() + self.result = OptimizeResult() + self.bottleneck_str = "" + self.bottleneck_table = {} + + @staticmethod + def split_duration_and_num(time_value: str) -> tuple: + split_data = time_value.split("s") # time value example: 0.229s(1756) + duration, num = 0.0, None + if len(split_data) >= 2: + try: + num = int(split_data[1].strip("()")) + except ValueError: + pass + if len(split_data) >= 1: + try: + duration = float(split_data[0]) + except ValueError: + print(f"[WARNING] Invalid time value: {time_value}.") + return duration, num + + @staticmethod + def calculate_ratio(dividend, divisor): + if not divisor: + return float("inf") + return dividend / divisor + + def path_check(self): + if self.base_collection_path: + if os.path.exists(self.base_collection_path): + self._has_base_collection = True + else: + print(f"[WARNING] Invalid path which not exists: {self.base_collection_path}.") + return os.path.exists(self.collection_path) + + def process(self): + base_collection_path = self.base_collection_path if self._has_base_collection else self.collection_path + result_data = ComparisonInterface(base_collection_path, self.collection_path).compare(Constant.OVERALL_COMPARE) + for data in result_data.values(): + self._headers = data.get("headers", []) + rows = data.get("rows", []) + if len(rows) == 2: + self._base_data = rows[0] + self._comparison_data = rows[1] + if not self._headers or not self._comparison_data: + return + self._is_minimal_profiling = 'E2E Time(Not minimal profiling)' not in self._headers + if self._has_base_collection: + self.cur_data["comparison_result"] = result_data + time_category_dict = {} + for time_category, time_list in self.performance_time_dict.items(): + time_value = self.get_time_value(time_category, self._comparison_data) + if time_value == Constant.INVALID_VALUE: + continue + duration, _ = self.split_duration_and_num(time_value) + time_category = time_category.split("(")[0] + time_category_dict[time_category] = duration + self.get_sub_category_time(time_category, time_list, duration) + self.cur_data["overall_data"] = time_category_dict + + def get_time_value(self, header_name: str, data_list: list): + try: + data_index = self._headers.index(header_name) + except ValueError: + return 
Constant.INVALID_VALUE
+        try:
+            time_value = data_list[data_index]
+        except IndexError:
+            return Constant.INVALID_VALUE
+        return time_value
+
+    def get_sub_category_time(self, category: str, time_list: list, total_duration: float):
+        sub_time_dict = {}
+        for time_name in time_list:
+            time_value = self.get_time_value(time_name, self._comparison_data)
+            if time_value == Constant.INVALID_VALUE:
+                continue
+            sub_time_dict.setdefault(f"{category} Subtype", []).append(self.time_name_map.get(time_name, ""))
+            duration, num = self.split_duration_and_num(time_value)
+            sub_time_dict.setdefault("Duration(s)", []).append(duration)
+            sub_time_dict.setdefault("Duration Ratio", []).append(
+                "{:.2%}".format(self.calculate_ratio(duration, total_duration)))
+            sub_time_dict.setdefault("Kernel Number", []).append(num)
+        self.cur_data[self.time_name_map.get(category)] = sub_time_dict
+
+    def identify_bottleneck(self):
+        overall_data = self.cur_data.get("overall_data")
+        if not overall_data:
+            return
+        e2e_time = sum(overall_data.values())
+        overall_bottleneck = f"The Model E2E Time is {e2e_time:.3f}s.\n"
+        comparison_bottleneck = ""
+        for time_type, time_value in overall_data.items():
+            # add subtype time bottleneck
+            advice = self.advice_map.get(time_type, "")
+            self.cur_bottleneck[self.time_name_map.get(time_type)] = f"{time_type} is {time_value}s.\n{advice}"
+            # add overall bottleneck
+            overall_bottleneck += f" -- {time_type} is {time_value}s\n"
+            if time_type == "Free Time" and self._is_minimal_profiling and \
+                    self.calculate_ratio(time_value, e2e_time) > 0.1:
+                overall_bottleneck += "The percentage of free time exceeds the 10% threshold."
+            if not self._has_base_collection:
+                continue
+            # add comparison bottleneck
+            time_type_origin = "Uncovered Communication Time(Wait Time)" \
+                if time_type == "Uncovered Communication Time" else time_type
+            base_duration, _ = self.split_duration_and_num(self.get_time_value(time_type_origin, self._base_data))
+            if time_value > base_duration:
+                ratio = "{:.2%}".format(self.calculate_ratio(time_value - base_duration, base_duration))
+                comparison_bottleneck += f"{time_type} exceeds the benchmark by {ratio}\n"
+        self.cur_bottleneck["overall_data"] = overall_bottleneck
+        self.cur_bottleneck["comparison_result"] = comparison_bottleneck
+
+    def optimize(self):
+        if self.path_check():
+            self.process()
+        self.identify_bottleneck()
+        self.format_bottleneck()
+        self.format_cur_data()
+        self.make_record()
+        self.make_render()
+        return self.result
+
+    def format_bottleneck(self):
+        result = ''
+        headers = []
+        data_list = []
+        data = []
+        for key, value in self.cur_bottleneck.items():
+            result += f'{key}: {value} \n'
+            headers.append(key)
+            data.append(value)
+        data_list.append(data)
+        self.bottleneck_str = result
+        self.bottleneck_table["headers"] = headers
+        self.bottleneck_table["data"] = data_list
+
+    def format_cur_data(self):
+        if not self.cur_data:
+            return
+        for data_type, data in self.cur_data.items():
+            if not data:
+                continue
+            # build a fresh table per data type so entries do not overwrite each other
+            data_table = {
+                "headers": [key for key in data],
+                "data": [[data[key] for key in data]]
+            }
+            self.cur_data_table[data_type] = data_table
+
+    def make_record(self):
+        """
+        make record for what and how to optimize
+        """
+        optimization_item = OptimizeItem(
+            OverallSummaryAnalyzer.OVERALL_SUMMARY_ANALYZER,
+            self.bottleneck_str,
+            self.cur_advices
+        )
+        self.result.add(OptimizeRecord(optimization_item))
+
+        self.result.add_detail(const.BOTTLENECK,
self.bottleneck_table["headers"], self.bottleneck_table["data"][0]) + for data_type, data in self.cur_data.items(): + if data: + self.result.add_detail(const.DATA + data_type, self.cur_data_table[data_type]["headers"], self.cur_data_table[data_type]["data"][0]) + + def make_render(self): + result_for_html = { + "Description" : self.bottleneck_str, + "suggestion" : self.cur_advices, + "details" : [self.bottleneck_table] + } + + self.html_render.render_template(key="cluster", + title=OverallSummaryAnalyzer.OVERALL_SUMMARY_ANALYZER, + template_dir="templates", + template_name="cluster_analysis.html", + cann_version=self.cann_version, + torch_version=self.torch_version, + result=result_for_html) diff --git a/profiler/advisor/common/constant.py b/profiler/advisor/common/constant.py index 664753c724..1d23b1d9ac 100644 --- a/profiler/advisor/common/constant.py +++ b/profiler/advisor/common/constant.py @@ -1,3 +1,229 @@ +# Copyright (c) 2023, Huawei Technologies Co., Ltd. +# All rights reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +from enum import Enum + + +class CsvTitle: + MODEL_NAME = "Model Name" + MODEL_ID = "Model ID" + TASK_ID = "Task ID" + STREAM_ID = "Stream ID" + INFER_ID = "Infer ID" + TASK_START_TIME = "Task Start Time(us)" + TASK_WAIT_TIME = "Task Wait Time(us)" + BLOCK_DIM = "Block Dim" + MIX_BLOCK_DIM = "Mix Block Dim" + HF32_ELIGIBLE = "HF32 Eligible" + INPUT_SHAPES = "Input Shapes" + INPUT_DATA_TYPES = "Input Data Types" + INPUT_FORMATS = "Input Formats" + OUTPUT_SHAPES = "Output Shapes" + OUTPUT_DATA_TYPES = "Output Data Types" + OUTPUT_FORMATS = "Output Formats" + CONTEXT_ID = "Context ID" + AICORE_TIME = "aicore_time(us)" + AIC_TOTAL_CYCLES = "aic_total_cycles" + AIC_MAC_TIME = "aic_mac_time(us)" + AIC_MAC_RATIO = "aic_mac_ratio" + AIC_SCALAR_TIME = "aic_scalar_time(us)" + AIC_SCALAR_RATIO = "aic_scalar_ratio" + AIC_MTE1_TIME = "aic_mte1_time(us)" + AIC_MTE1_RATIO = "aic_mte1_ratio" + AIC_MTE2_TIME = "aic_mte2_time(us)" + AIC_MTE2_RATIO = "aic_mte2_ratio" + AIC_FIXPIPE_TIME = "aic_fixpipe_time(us)" + AIC_FIXPIPE_RATIO = "aic_fixpipe_ratio" + AIC_ICACHE_MISS_RATE = "aic_icache_miss_rate" + AIV_TIME = "aiv_time(us)" + AIV_TOTAL_CYCLES = "aiv_total_cycles" + AIV_VEC_TIME = "aiv_vec_time(us)" + AIV_VEC_RATIO = "aiv_vec_ratio" + AIV_SCALAR_TIME = "aiv_scalar_time(us)" + AIV_SCALAR_RATIO = "aiv_scalar_ratio" + AIV_MTE2_TIME = "aiv_mte2_time(us)" + AIV_MTE2_RATIO = "aiv_mte2_ratio" + AIV_MTE3_TIME = "aiv_mte3_time(us)" + AIV_MTE3_RATIO = "aiv_mte3_ratio" + AIV_ICACHE_MISS_RATE = "aiv_icache_miss_rate" + CUBE_UTILIZATION = "cube_utilization( %)" + TASK_DURATION_SUM = "Task Duration Sum(us)" + TASK_DURATION_MEAN = "Task Duration Mean(us)" + TASK_DURATION_STD = "Task Duration Std(us)" + TASK_DURATION_RATIO = "Task Duration Ratio(100%)" + SIZE = "size(MB)" + THROUGHPUT = "throughput(GB/s)" + COLOR = "color" + GAP = "Gap(us)" + DURATION_SUM = "Duration Sum(us)" + COUNT = "Count" + MAX_DURATION = "Max Duration(us)" + MIN_DURATION = "Min Duration(us)" + AVG_DURATION = "Avg 
Duration(us)" + DURATION_RATIO = "Duration Ratio" + INDEX = "Index" + + +# 定义CSV_TITILE_V1类,继承自CSV_TITILE类, 适配旧版csv +class CsvTitleV1(CsvTitle): + OP_NAME = "Op Name" + OP_TYPE = "OP Type" + TASK_TYPE = "Task Type" + TASK_DURATION = "Task Duration(us)" + + +# 定义CSV_TITILE_V1类,继承自CSV_TITILE类, 适配新版csv +class CsvTitleV2(CsvTitle): + OP_NAME = "Name" + OP_TYPE = "Type" + TASK_TYPE = "Accelerator Core" + TASK_DURATION = "Duration(us)" + + +class Constant: + DTYPE_SIZE_MAP = {"int8": 1, "uint8": 1, + "int16": 2, "uint16": 2, + "int32": 4, "uint32": 4, + "int64": 8, "uint64": 8, + "float16": 2, + "bfloat16": 2, + "bf16": 2, + "dt_bf16": 2, + "float32": 4, + "float": 4, + "float64": 8, + "complex64": 8, + "complex128": 16, + "bool": 1} + TP_THRESHOLD = 1150 + MAX_INPUT_MODE_LEN = 30 + MAX_INPUT_ADVICE_LEN = 30 + SMALL_OP_DUR_RATIO = 0.2 + SMALL_OP_NUM_RATIO = 0.2 + BYTE_UNIT_TRANS = 1024 + UNIT_TRANS = 1000 + + # mode list + COMPUTE = "compute" + TIMELINE = "timeline" + CLUSTER = "cluster" + OVERALL = "overall" + PIPELINE = "pipeline" + + # advice list + SLOW_RANK = "slow rank" + SLOW_LINK = "slow link" + KERNEL = "kernel" + + # compute + NPU_FUSED = "npu_fused" + NPU_SLOW = "npu_slow" + + # timeline + OPTIM = "optimizer" + OP_SCHE = "op_schedule" + + # overall + SUMMARY = "summary" + + PT_PROF_SUFFIX = "ascend_pt" + ASCEND_PROFILER_OUTPUT = "ASCEND_PROFILER_OUTPUT" + COLLECTION_PATH = "collection_path" + CLUSTER_ANALYSIS_OUTPUT = "cluster_analysis_output" + KERNEL_DETAILS_CSV = "kernel_details.csv" + CLUSTER_STEP_TIME_CSV = "cluster_step_trace_time.csv" + CLUSTER_COMM_JSON = "cluster_communication.json" + + # pipline + OP_NAME = "name" + OP_TID = "tid" + PID = "pid" + TS = "ts" + DUR = "dur" + CAT = "cat" + ARGS = "args" + PH = "ph" + ID = "id" + PH_START = "s" + PH_BEGIN = "B" + PH_END = "E" + PH_META = "M" + PH_X = "X" + CNAME = "cname" + PROCESS_NAME = "process_name" + FRAMEWORK_NAME = "Python" + ASCEND_HARDWARE_NAME = "Ascend Hardware" + ASYNC_NPU = "async_npu" + STEP_PREFIX = "ProfilerStep#" + FP_ATEN_OP = "aten" + FP_C10D_OP = "c10d" + HCOM_OP_PREFIX = "hcom_" + BP_AUTOGRAD_OP = "autograd" + TRACE_VIEW_JSON = "trace_view.json" + + # pattern_dict key: pattern, value: pattern name + PATTERN_DICT = {("Add", "DropOutDoMask", "Add"): "bias_dropout_add", + ("BatchMatMul", "Mul", "Cast", "Mul", "MaskedFill", "SoftmaxV2", "Cast", "DropOutDoMask", + "AsStrided", "BatchMatMul", "Transpose"): "FA", + ("Transpose", "Transpose", "Transpose", "Mul", "Transpose", "BatchMatMulV2", "MaskedFill", + "Cast", "SoftmaxV2", "Cast", "DropOutDoMask", "BatchMatMulV2", "Transpose"): "FA", + ("Transpose", "BatchMatMulV2", "Transpose", "Transpose", "BatchMatMulV2", "ZerosLike", + "DropOutDoMask", "Cast", "SoftmaxGrad", "Cast", "MaskedFill", "BatchMatMulV2", + "BatchMatMulV2", "Mul"): "FA", + ("Cast", "Square", "ReduceMeanD", "Add", "Rsqrt", "Cast", "Cast", "Mul", "Cast", "Cast", + "Mul", "Cast"): "RMSNORM", + ("Cast", "LayerNorm", "Cast"): "LayerNorm", + ("Add", "LayerNorm"): "AddLayerNorm", + ("Add", "LayerNormV3"): "AddLayerNorm", + ("Gelu", "Add"): "GeluAdd", + ("Cast", "Square", "MemSet", "ReduceMean", "Add", "Rsqrt", "Mul", "Cast", "Mul"): "RMSNorm", + ("BatchMatMul", "RealDiv", "Add", "Maximum", "SoftmaxV2", "Cast", "BatchMatMul"): "FA", + ("BatchMatMulV2", "RealDiv", "Add", "Cast", "Maximum", "Cast", "SoftmaxV2", "AsStrided", + "BatchMatMulV2"): "FA", + ("BatchMatMulV2", "RealDiv", "Add", "Cast", "SoftmaxV2", "Cast", "BroadcastTo", + "BatchMatMulV2"): "FA", + ("Mul", "Slice", "Neg", "Slice", "ConcatD", "Cast", 
"Mul", "Add"): "RotaryMul", + ("Mul", "AsStrided", "Neg", "AsStrided", "ConcatD", "Mul", "Add"): "RotaryMul", + ("Mul", "Slice", "Neg", "Slice", "ConcatD", "Mul", "Add"): "RotaryMul", + ("MatMulV2", "Swish", "MatMulV2", "Mul", "MatMulV2"): "FFN", + ("Transpose", "Transpose", "GatherElement", "Transpose"): "GatherElement", + ("Slice", "Slice", "Swish", "Mul"): "torch_npu.npu_swiglu", + ("Cast", "Mul", "MaskedFill", "SoftmaxV2", "Cast"): "torch_npu.npu_scaled_masked_softmax", + ("Mul", "Slice", "Neg", "Slice", "ConcatD", "Mul"): "torch_npu.npu_rotary_mul", + ("Cast", "Square", "ReduceMeanD", "Add", "Rsqrt", "Mul", "Cast", "Mul"): "torch_npu.npu_rms_norm"} + TITLE = CsvTitleV2 + + @classmethod + def update_title(cls): + cls.TITLE = CsvTitleV1 + + +class CoreType: + AIV = "AI_VECTOR_CORE" + AIC = "AI_CORE" + AICPU = "AI_CPU" + MIX_AIV = "MIX_AIV" + MIX_AIC = "MIX_AIC" + HCCL = "HCCL" + + +class PerfColor(Enum): + WHITE = 0 + GREEN = 1 + YELLOW = 2 + RED = 3 + # timeline DEQUEUE = "Dequeue" DEQUEUE_SEP = "@" @@ -120,3 +346,6 @@ CLUSTER_ANALYSIS_OUTPUT = "cluster_analysis_output" KERNEL_DETAILS_CSV = "kernel_details.csv" CLUSTER_STEP_TIME_CSV = "cluster_step_trace_time.csv" CLUSTER_COMM_JSON = "cluster_communication.json" + +BOTTLENECK = "bottleneck" +DATA = "data" \ No newline at end of file diff --git a/profiler/advisor/common/trace_view_json.py b/profiler/advisor/common/trace_view_json.py new file mode 100644 index 0000000000..8171f06ee2 --- /dev/null +++ b/profiler/advisor/common/trace_view_json.py @@ -0,0 +1,209 @@ +# Copyright (c) 2024, Huawei Technologies Co., Ltd. +# All rights reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+import os
+from abc import abstractmethod
+from dataclasses import dataclass
+from dataclasses import field
+from typing import Dict
+from typing import List
+
+import pandas as pd
+
+from common_func.file_manager import FileManager
+
+
+@dataclass
+class TraceObj:
+    ph: str = ""
+    bp: str = ""
+    cat: str = ""
+    name: str = ""
+    pid: int = 0
+    tid: int = 0
+    id: int = 0
+    ts: str = ""
+    dur: float = 0.0
+    args: dict = field(default_factory=dict)
+
+    @abstractmethod
+    def hash(self):
+        raise Exception("To be implemented")
+
+    def valid(self):
+        return self.name != ""
+
+    def check_hashable(self):
+        if not self.valid():
+            raise Exception("Illegal {} to hash".format(self.__class__.__name__))
+
+
+@dataclass
+class Process(TraceObj):
+    def hash(self):
+        self.check_hashable()
+        # msprof guarantees the name is unique
+        return self.args.get("name")
+
+
+@dataclass
+class Thread(TraceObj):
+    def hash(self):
+        self.check_hashable()
+        # msprof guarantees the name is unique
+        return self.args.get("name")
+
+
+@dataclass
+class DurationEvent(TraceObj):
+    def hash(self):
+        self.check_hashable()
+        return self.ts
+
+
+@dataclass
+class FlowEvent(TraceObj):
+    s_point_ts: str = ""
+    e_point_ts: str = ""
+
+    def hash(self):
+        self.check_hashable()
+        return self.e_point_ts
+
+
+class TraceViewJson:
+
+    def __init__(self, path):
+        self.processes: Dict[str, Process] = dict()
+        self.threads: Dict[str, Thread] = dict()
+        self.python_dur_events: Dict[str, DurationEvent] = dict()
+        self.cann_dur_events: Dict[str, DurationEvent] = dict()
+        self.ascend_hardware_dur_events: Dict[str, DurationEvent] = dict()
+        self.torch_2_npu_flow_events: Dict[str, FlowEvent] = dict()
+        traces = FileManager.read_json_file(path)
+        self._load_obj(traces)
+
+    def get_call_stack(self, data: pd.DataFrame, index_id: int, ts_col: str) -> str:
+        if ts_col not in data.columns.tolist():
+            print("[ERROR] No {} col found in data columns.".format(ts_col))
+            return ""
+        row = data.loc[index_id]
+        timestamp = row[ts_col]
+        flow_event = self.get_torch_2_npu_flow_event(timestamp)
+        if not flow_event.valid():
+            print("[ERROR] Get flow event failed for pattern {}.".format(row['pattern']))
+            return ""
+        flow_event_s_key = flow_event.s_point_ts
+        python_dur_events = self.get_python_dur_events_contain_ts(flow_event_s_key)
+        if not python_dur_events:
+            print("[ERROR] No python dur event found for pattern {}.".format(row['pattern']))
+            return ""
+        # keep compatibility between old and new call stack formats
+        if python_dur_events[0].args.get("Call stack"):
+            # old format
+            call_stack_list = python_dur_events[0].args.get("Call stack").split(";")
+        else:
+            python_dur_events.sort(key=lambda e: e.ts)
+            # new format
+            call_stack_list = [event.name for event in python_dur_events if event.cat == "python_function"]
+        call_stack = "\n".join(call_stack_list)
+        return call_stack
+
+    def get_torch_2_npu_flow_event(self, end_time) -> FlowEvent:
+        if not self.torch_2_npu_flow_events or not self.torch_2_npu_flow_events.get(end_time):
+            print("[ERROR] Find flow event failed for ts: {}".format(end_time))
+            return FlowEvent()
+        return self.torch_2_npu_flow_events.get(end_time)
+
+    def get_python_dur_events_contain_ts(self, ts) -> List[DurationEvent]:
+        res = []
+        for event in self.python_dur_events.values():
+            if float(event.ts) <= float(ts) <= float(event.ts) + event.dur:
+                res.append(event)
+        return res
+
+    def _load_obj(self, traces):
+        self._load_format(traces)
+        if not self._check_format():
+            print("[ERROR] Failed to parse trace json: unexpected format.")
+            return
+        self._load_duration_events(traces)
+        self._load_torch_to_npu_flow_events(traces)
+
+    def _check_format(self):
+        # only these two processes are needed for now; the list can be extended
+        check_processes = ['Python', 'Ascend Hardware']
+        for check_process in check_processes:
+            if check_process in self.processes:
+                continue
+            print("[ERROR] {} process not found in json.".format(check_process))
+            return False
+        return True
+
+    # load the pid/tid metadata events
+    def _load_format(self, traces: List[Dict]):
+        for i, trace in enumerate(traces):
+            if trace.get('name') == 'process_name':
+                if not trace.get('args') or not trace.get('args').get('name') or not trace.get('pid'):
+                    continue
+                process = Process(**trace)
+                self.processes[process.hash()] = process
+            if trace.get('name') == 'thread_name':
+                if not trace.get('args') or not trace.get('args').get('name') or not trace.get('tid'):
+                    continue
+                thread = Thread(**trace)
+                self.threads[thread.hash()] = thread
+
+    def _load_duration_events(self, traces: List[Dict]):
+        def check_events(_trace):
+            return _trace.get('name') and _trace.get("ts") and _trace.get("dur")
+
+        python_pid = self.processes.get("Python").pid
+        cann_pid = self.processes.get("CANN").pid
+        ascend_hardware_pid = self.processes.get("Ascend Hardware").pid
+        for i, trace in enumerate(traces):
+            if trace.get('ph') != 'X':
+                continue
+            if not check_events(trace):
+                continue
+            event = DurationEvent(**trace)
+            if trace.get('pid') == python_pid:
+                self.python_dur_events[event.hash()] = event
+            elif trace.get('pid') == cann_pid:
+                self.cann_dur_events[event.hash()] = event
+            elif trace.get("pid") == ascend_hardware_pid:
+                self.ascend_hardware_dur_events[event.hash()] = event
+
+    def _load_torch_to_npu_flow_events(self, traces: List[Dict]):
+        def check_events(_trace):
+            return _trace.get('name') and _trace.get("id") and _trace.get("ts")
+
+        flow_events_table_by_id = dict()
+
+        python_pid = self.processes.get("Python").pid
+        for i, trace in enumerate(traces):
+            if trace.get('ph') != 's' and trace.get('ph') != 'f' and trace.get('pid') != python_pid:
+                continue
+            if not check_events(trace):
+                continue
+            event = flow_events_table_by_id.get(trace.get("id"))
+            if not event:
+                event = FlowEvent(**trace)
+            if trace.get('ph') == 's':
+                event.s_point_ts = trace.get('ts')
+            else:
+                event.e_point_ts = trace.get('ts')
+            flow_events_table_by_id[event.id] = event
+
+        self.torch_2_npu_flow_events = {eve.hash(): eve for eve in flow_events_table_by_id.values()}
diff --git a/profiler/advisor/common/trace_view_preprocessor.py b/profiler/advisor/common/trace_view_preprocessor.py
new file mode 100644
index 0000000000..14a13066f6
--- /dev/null
+++ b/profiler/advisor/common/trace_view_preprocessor.py
@@ -0,0 +1,208 @@
+# Copyright (c) 2023, Huawei Technologies Co., Ltd.
+# All rights reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
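The TraceViewJson helper above is typically driven from a kernel table plus the matching trace_view.json. The sketch below is illustrative and not part of the patch; the file locations are assumptions, and only the "Start Time(us)" column name is taken from the accompanying unit tests.

```python
import pandas as pd

from profiler.advisor.common.trace_view_json import TraceViewJson

# Assumed locations inside an ascend_pt profiling output directory.
trace = TraceViewJson("ASCEND_PROFILER_OUTPUT/trace_view.json")
kernels = pd.read_csv("ASCEND_PROFILER_OUTPUT/kernel_details.csv")

# For the kernel in row 0, follow the torch-to-npu flow event whose end timestamp
# matches "Start Time(us)" and print the recovered Python call stack ("" on failure).
print(trace.get_call_stack(kernels, index_id=0, ts_col="Start Time(us)"))
```

get_call_stack() resolves the flow event ending at the kernel start time, then joins the enclosing python_function events (or the legacy "Call stack" argument) into a newline-separated stack.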
+ +import re +import sys +from typing import Optional +from dataclasses import dataclass + +from profiler.advisor.common.constant import Constant + + +@dataclass +class FineTraceViewData: + py_pid: int = -1 + fp_tid: int = -1 + bp_tid: int = -1 + ascend_pid: int = -1 + min_ts: str = str(sys.maxsize) + max_ts: str = "0" + hcom_tids: list = None + fp_ops: list = None + bp_ops: list = None + hcom_ops: list = None + npu_ops_ts_dur: dict = None + torch_to_npu_links: list = None + + def __post_init__(self): + self.hcom_tids = self.hcom_tids or [] + self.fp_ops = self.fp_ops or [] + self.bp_ops = self.bp_ops or [] + self.hcom_ops = self.hcom_ops or [] + self.npu_ops_ts_dur = self.npu_ops_ts_dur or {} + self.torch_to_npu_links = self.torch_to_npu_links or [] + + def sort(self): + self.fp_ops.sort(key=lambda x: x[Constant.TS]) + self.bp_ops.sort(key=lambda x: x[Constant.TS]) + self.hcom_ops.sort(key=lambda x: x[Constant.TS]) + self.torch_to_npu_links.sort(key=lambda x: x[Constant.TS]) + + +class TraceViewPreProcessor: + """ + Trace view data preprocess + """ + + @staticmethod + def _is_fp_op(op_name: str) -> bool: + """ + check whether op is fp op + """ + return op_name.startswith(Constant.FP_ATEN_OP) or op_name.startswith(Constant.FP_C10D_OP) + + @staticmethod + def _is_fp_data(data: dict, fp_tid: int, py_pid: int) -> bool: + """ + check whether data is valid fp data + """ + return data[Constant.OP_TID] == fp_tid and \ + Constant.TS in data and Constant.DUR in data and \ + not data[Constant.OP_NAME].startswith(Constant.STEP_PREFIX) and \ + data[Constant.PID] == py_pid + + @staticmethod + def _is_bp_op(op_name: str) -> bool: + """ + check whether op is bp op + """ + return op_name.startswith(Constant.BP_AUTOGRAD_OP) + + @staticmethod + def _is_bp_data(data: dict, bp_tid: int, py_pid: int) -> bool: + """ + check whether data is valid bp data + """ + return data[Constant.OP_TID] == bp_tid and \ + Constant.TS in data and Constant.DUR in data and \ + data[Constant.PID] == py_pid + + @staticmethod + def _is_torch_to_npu_link(data: dict, fp_tid: int) -> bool: + """ + check whether data is torch to npu link + """ + return Constant.CAT in data and data[Constant.CAT] == Constant.ASYNC_NPU and \ + data[Constant.PH] == Constant.PH_START and \ + data[Constant.PID] == fp_tid + + @staticmethod + def _is_send_recv_op(op_name: str) -> bool: + """ + check whether op is hcom send or recv op + """ + # eg: hcom_BatchSendRecv__101_0_1 + p1 = re.compile(r'hcom_\w+SendRecv__\d+') + # eg: hcom_send__101_0_1 + p2 = re.compile(r'hcom_send__\d+') + # eg: hcom_receive__101_0_1 + p3 = re.compile(r'hcom_receive__\d+') + return bool(p1.match(op_name)) or bool(p2.match(op_name)) or bool(p3.match(op_name)) + + @staticmethod + def _is_hcom_op(op_name: str) -> bool: + """ + check whether data is hcom data + """ + return op_name.startswith(Constant.HCOM_OP_PREFIX) + + @staticmethod + def _is_python_process(data: dict) -> bool: + """ + check whether data is python process + """ + return Constant.PH in data and data[Constant.PH] == Constant.PH_META and \ + data[Constant.OP_NAME] == Constant.PROCESS_NAME and \ + data[Constant.ARGS][Constant.OP_NAME] == Constant.FRAMEWORK_NAME + + @staticmethod + def _is_step_op(data: dict) -> bool: + """ + check whether data is step data + """ + return data[Constant.OP_NAME].startswith(Constant.STEP_PREFIX) + + @staticmethod + def _is_ascend_process(data: dict) -> bool: + """ + check whether data is ascend process data + """ + return Constant.PH in data and data[Constant.PH] == Constant.PH_META and \ + 
data[Constant.OP_NAME] == Constant.PROCESS_NAME and \ + data[Constant.ARGS][Constant.OP_NAME] == Constant.ASCEND_HARDWARE_NAME + + @staticmethod + def _is_npu_op(data: dict, ascend_pid: int) -> bool: + """ + check whether data is npu op + """ + return Constant.PH in data and data[Constant.PH] == Constant.PH_X and \ + not data[Constant.OP_NAME].isupper() and \ + data[Constant.PID] == ascend_pid + + def process(self, raw_data: list) -> Optional[FineTraceViewData]: + """ + preprocess raw data + """ + if not raw_data: + print("[ERROR] No raw data found in trace view data.") + return None + + raw_fp_tids, raw_bp_tids, raw_hcom_tids = set(), set(), set() + fine_data = FineTraceViewData() + + # counting fp ops and bp ops tid and ascend pid + for data in raw_data: + if self._is_fp_op(data[Constant.OP_NAME]): + raw_fp_tids.add(data[Constant.OP_TID]) + elif self._is_bp_op(data[Constant.OP_NAME]): + raw_bp_tids.add(data[Constant.OP_TID]) + elif self._is_send_recv_op(data[Constant.OP_NAME]): + fine_data.hcom_ops.append(data) + raw_hcom_tids.add(data[Constant.OP_TID]) + elif self._is_python_process(data): + fine_data.py_pid = data[Constant.PID] + elif self._is_ascend_process(data): + fine_data.ascend_pid = data[Constant.PID] + + # find max and min ts in hcom ops + if self._is_hcom_op(data[Constant.OP_NAME]): + # for compatibility with old data (ts is float type) + ts = data[Constant.TS] if not isinstance(data[Constant.TS], float) else str(data[Constant.TS]) + fine_data.min_ts = min(fine_data.min_ts, ts) + fine_data.max_ts = max(fine_data.max_ts, ts) + + unique_fp_tid = list(raw_fp_tids - raw_bp_tids) + unique_bp_tid = list(raw_bp_tids) + fine_data.hcom_tids = list(raw_hcom_tids) + + if not unique_fp_tid or not unique_bp_tid: + print("[INFO] No fp or bp tid found in trace view data.") + else: + fine_data.fp_tid, fine_data.bp_tid = unique_fp_tid[0], unique_bp_tid[0] + + # filter fp ops and bp ops and torch_to_npu_links + for data in raw_data: + if self._is_fp_data(data, fine_data.fp_tid, fine_data.py_pid): + fine_data.fp_ops.append(data) + elif self._is_bp_data(data, fine_data.bp_tid, fine_data.py_pid): + fine_data.bp_ops.append(data) + elif self._is_torch_to_npu_link(data, fine_data.fp_tid): + fine_data.torch_to_npu_links.append(data) + elif self._is_npu_op(data, fine_data.ascend_pid): + fine_data.npu_ops_ts_dur[data[Constant.TS]] = data[Constant.DUR] + + fine_data.sort() + return fine_data diff --git a/profiler/advisor/interface/interface.py b/profiler/advisor/interface/interface.py index 156922f4d1..af801b9ded 100644 --- a/profiler/advisor/interface/interface.py +++ b/profiler/advisor/interface/interface.py @@ -4,14 +4,15 @@ from profiler.advisor.analyzer.schedule.fusion_ops.fusion_ops_analyzer import Ti from profiler.advisor.utils.utils import Timer from profiler.advisor.analyzer.cluster.slow_rank_analyser import SlowRankAnalyzer from profiler.advisor.analyzer.cluster.slow_link_analyser import SlowLinkAnalyzer - +from profiler.advisor.analyzer.overall.overall_summary_analyzer import OverallSummaryAnalyzer +from profiler.advisor.analyzer.computation.npu_fused.npu_slow_advice import NpuSlowAnalyzer class Interface: supported_analyzer = { "schedule": [TimelineFusionOpsAnalyzer], "computation": [], "communication": [], - "overall": [], + "overall": [OverallSummaryAnalyzer], "dataloader": [], "cluster": [SlowRankAnalyzer, SlowLinkAnalyzer] } diff --git a/profiler/cluster_analyse/cluster_analysis.py b/profiler/cluster_analyse/cluster_analysis.py index fd127fdc03..9ec33928aa 100644 --- 
a/profiler/cluster_analyse/cluster_analysis.py +++ b/profiler/cluster_analyse/cluster_analysis.py @@ -16,13 +16,13 @@ import argparse import os -from cluster_data_preprocess.pytorch_data_preprocessor import PytorchDataPreprocessor -from cluster_data_preprocess.mindspore_data_preprocessor import MindsporeDataPreprocessor -from communication_group.communication_group_generator import CommunicationGroupGenerator -from common_func.constant import Constant -from common_func.file_manager import FileManager -from common_func.path_manager import PathManager -from analysis.analysis_facade import AnalysisFacade +from profiler.cluster_analyse.cluster_data_preprocess.pytorch_data_preprocessor import PytorchDataPreprocessor +from profiler.cluster_analyse.cluster_data_preprocess.mindspore_data_preprocessor import MindsporeDataPreprocessor +from profiler.cluster_analyse.communication_group.communication_group_generator import CommunicationGroupGenerator +from profiler.cluster_analyse.common_func.constant import Constant +from profiler.cluster_analyse.common_func.file_manager import FileManager +from profiler.cluster_analyse.common_func.path_manager import PathManager +from profiler.cluster_analyse.analysis.analysis_facade import AnalysisFacade class ClusterAnalysis: diff --git a/profiler/cluster_analyse/common_func/file_manager.py b/profiler/cluster_analyse/common_func/file_manager.py index 28ecbeaaf1..00c9fb1bbe 100644 --- a/profiler/cluster_analyse/common_func/file_manager.py +++ b/profiler/cluster_analyse/common_func/file_manager.py @@ -17,8 +17,8 @@ import os import csv import json -from common_func.constant import Constant -from common_func.path_manager import PathManager +from profiler.cluster_analyse.common_func.constant import Constant +from profiler.cluster_analyse.common_func.path_manager import PathManager class FileManager: diff --git a/profiler/test/ut/advisor/advisor_backend/compute_advice/test_npu_slow_advice.py b/profiler/test/ut/advisor/advisor_backend/compute_advice/test_npu_slow_advice.py index 8830d49599..894367d070 100644 --- a/profiler/test/ut/advisor/advisor_backend/compute_advice/test_npu_slow_advice.py +++ b/profiler/test/ut/advisor/advisor_backend/compute_advice/test_npu_slow_advice.py @@ -6,7 +6,7 @@ import csv import unittest from advisor_backend.interface import Interface -from advisor_backend.compute_advice.npu_slow_advice import NpuSlowAdvice +from advisor_backend.compute_advice.npu_slow_advice import class TestNpuSlowAdvice(unittest.TestCase): @@ -186,7 +186,7 @@ class TestNpuSlowAdvice(unittest.TestCase): self.create_kernel_details() interface = Interface(self.ASCEND_PT_DIR) data = interface.get_data('compute', 'npu_slow') - call_stack = NpuSlowAdvice(self.ASCEND_PT_DIR).get_call_stack(data, index_id=0, ts_col="Start Time(us)") + call_stack = (self.ASCEND_PT_DIR).get_call_stack(data, index_id=0, ts_col="Start Time(us)") self.assertEqual(9, len(data)) self.assertEqual("", call_stack) @@ -197,8 +197,8 @@ class TestNpuSlowAdvice(unittest.TestCase): interface = Interface(self.ASCEND_PT_DIR) data = interface.get_data('compute', 'npu_slow') slow_op_data = data[data["color"] == "RED"] - NpuSlowAdvice.save_to_excel(data, file_path=os.path.join(self.ASCEND_PT_DIR, "slow_op.xlsx")) - call_stack = NpuSlowAdvice(self.ASCEND_PT_DIR).get_call_stack(data, index_id=0, ts_col="Start Time(us)") + .save_to_excel(data, file_path=os.path.join(self.ASCEND_PT_DIR, "slow_op.xlsx")) + call_stack = (self.ASCEND_PT_DIR).get_call_stack(data, index_id=0, ts_col="Start Time(us)") self.assertEqual(9, 
len(data)) self.assertEqual(2, len(slow_op_data)) print(call_stack) @@ -213,8 +213,8 @@ class TestNpuSlowAdvice(unittest.TestCase): interface = Interface(self.ASCEND_PT_DIR) data = interface.get_data('compute', 'npu_slow') slow_op_data = data[data["color"] == "RED"] - NpuSlowAdvice.save_to_excel(data, file_path=os.path.join(self.ASCEND_PT_DIR, "slow_op.xlsx")) - call_stack = NpuSlowAdvice(self.ASCEND_PT_DIR).get_call_stack(data, index_id=0, ts_col="Start Time(us)") + .save_to_excel(data, file_path=os.path.join(self.ASCEND_PT_DIR, "slow_op.xlsx")) + call_stack = (self.ASCEND_PT_DIR).get_call_stack(data, index_id=0, ts_col="Start Time(us)") self.assertEqual(9, len(data)) self.assertEqual(2, len(slow_op_data)) print(call_stack) -- Gitee From 0eda53956df2f3d2aef3a19b30df56289911454b Mon Sep 17 00:00:00 2001 From: shpity Date: Thu, 16 May 2024 18:28:20 +0800 Subject: [PATCH 13/21] add graph op fusion strategy --- profiler/advisor/analyzer/base_analyzer.py | 2 +- .../computation/aicpu/aicpu_checker.py | 4 +- .../computation/bound/block_dim_checker.py | 2 +- .../bound/operator_bound_checker.py | 2 +- .../op_compile/dynamic_shape_checker.py | 2 +- .../advisor/analyzer/graph_fusion/__init__.py | 0 .../graph_fusion/graph_fusion_analyzer.py | 47 ++ .../graph_fusion/graph_fusion_checker.py | 207 ++++++++ profiler/advisor/common/analyzer_scopes.py | 1 + profiler/advisor/common/graph/__init__.py | 0 profiler/advisor/common/graph/graph.py | 135 +++++ profiler/advisor/common/graph/graph_match.py | 355 +++++++++++++ profiler/advisor/common/graph/graph_parser.py | 413 +++++++++++++++ profiler/advisor/dataset/__init__.py | 6 - profiler/advisor/dataset/graph_dataset.py | 53 ++ .../display/html/templates/fusion.html | 47 ++ profiler/advisor/interface/interface.py | 8 +- profiler/advisor/rules/op_fusion_pass.yaml | 491 ++++++++++++++++++ profiler/cli/__init__.py | 2 +- profiler/cli/entrance.py | 2 +- .../cluster_analyse/common_func/db_manager.py | 3 + profiler/test/tools/tool.py | 2 +- 22 files changed, 1766 insertions(+), 18 deletions(-) create mode 100644 profiler/advisor/analyzer/graph_fusion/__init__.py create mode 100644 profiler/advisor/analyzer/graph_fusion/graph_fusion_analyzer.py create mode 100644 profiler/advisor/analyzer/graph_fusion/graph_fusion_checker.py create mode 100644 profiler/advisor/common/graph/__init__.py create mode 100644 profiler/advisor/common/graph/graph.py create mode 100644 profiler/advisor/common/graph/graph_match.py create mode 100644 profiler/advisor/common/graph/graph_parser.py create mode 100644 profiler/advisor/dataset/graph_dataset.py create mode 100644 profiler/advisor/display/html/templates/fusion.html create mode 100644 profiler/advisor/rules/op_fusion_pass.yaml diff --git a/profiler/advisor/analyzer/base_analyzer.py b/profiler/advisor/analyzer/base_analyzer.py index 160f05c464..e2f3abc537 100644 --- a/profiler/advisor/analyzer/base_analyzer.py +++ b/profiler/advisor/analyzer/base_analyzer.py @@ -80,7 +80,7 @@ class BaseAnalyzer(VersionControl, metaclass=ABCMeta): key = dataset_cls.get_key() if key not in self.dataset_list: self.dataset_list[key] = [] - self.dataset_list[key].append(dataset) + self.dataset_list[key].append(dataset) @staticmethod def get_first_data_by_key(data, key) -> Union[Dataset, None]: diff --git a/profiler/advisor/analyzer/computation/aicpu/aicpu_checker.py b/profiler/advisor/analyzer/computation/aicpu/aicpu_checker.py index 4654d97225..052711f29b 100644 --- a/profiler/advisor/analyzer/computation/aicpu/aicpu_checker.py +++ 
b/profiler/advisor/analyzer/computation/aicpu/aicpu_checker.py @@ -42,7 +42,7 @@ class AicpuChecker(OperatorChecker): def load_aicpu_rules(self, rule_path="rules/aicpu_rules.yaml") -> Dict: if not os.path.isabs(rule_path): rule_path = os.path.join(os.path.dirname(__file__), - "../../computation/", "../", rule_path) + "../../../", rule_path) if not os.path.exists(rule_path): logger.warning("Skip analyze aicpu issues, because %s does not exist.", rule_path) @@ -146,7 +146,7 @@ class AicpuChecker(OperatorChecker): return True def make_render(self, html_render, record): - html_render.render_template(key="operator", + html_render.render_template(key="computation", template_dir="templates", template_name="operator_ai_cpu.html", format_result=self.format_operator_result(record, constant.OPERATOR_LIST_UNLIMIT)) diff --git a/profiler/advisor/analyzer/computation/bound/block_dim_checker.py b/profiler/advisor/analyzer/computation/bound/block_dim_checker.py index d1a1384b8d..d90ef56c78 100644 --- a/profiler/advisor/analyzer/computation/bound/block_dim_checker.py +++ b/profiler/advisor/analyzer/computation/bound/block_dim_checker.py @@ -43,7 +43,7 @@ class BlockDimChecker(OperatorChecker): return True def make_render(self, html_render, record): - html_render.render_template(key="operator", + html_render.render_template(key="computation", template_dir="templates", template_name="operator_block_dim.html", format_result=self.format_operator_result(record, constant.OPERATOR_OUT_TOPK)) diff --git a/profiler/advisor/analyzer/computation/bound/operator_bound_checker.py b/profiler/advisor/analyzer/computation/bound/operator_bound_checker.py index d919eb7d5f..4ede3c94e6 100644 --- a/profiler/advisor/analyzer/computation/bound/operator_bound_checker.py +++ b/profiler/advisor/analyzer/computation/bound/operator_bound_checker.py @@ -44,7 +44,7 @@ class OperatorBoundChecker(OperatorChecker): return True def make_render(self, html_render, record): - html_render.render_template(key="operator", + html_render.render_template(key="computation", template_dir="templates", template_name="operator_no_bound.html", format_result=self.format_operator_result(record, constant.OPERATOR_OUT_TOPK)) diff --git a/profiler/advisor/analyzer/computation/op_compile/dynamic_shape_checker.py b/profiler/advisor/analyzer/computation/op_compile/dynamic_shape_checker.py index 4d405eb918..746cc71606 100644 --- a/profiler/advisor/analyzer/computation/op_compile/dynamic_shape_checker.py +++ b/profiler/advisor/analyzer/computation/op_compile/dynamic_shape_checker.py @@ -76,7 +76,7 @@ class DynamicShapeChecker(OperatorChecker): return format_result def make_render(self, html_render, record): - html_render.render_template(key="operator", + html_render.render_template(key="computation", template_dir="templates", template_name="operator_dynamic_shape.html", format_result=self.format_operator_result(record)) diff --git a/profiler/advisor/analyzer/graph_fusion/__init__.py b/profiler/advisor/analyzer/graph_fusion/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/profiler/advisor/analyzer/graph_fusion/graph_fusion_analyzer.py b/profiler/advisor/analyzer/graph_fusion/graph_fusion_analyzer.py new file mode 100644 index 0000000000..059950089d --- /dev/null +++ b/profiler/advisor/analyzer/graph_fusion/graph_fusion_analyzer.py @@ -0,0 +1,47 @@ +from typing import List +from functools import partial + +from profiler.advisor.analyzer.base_analyzer import BaseAnalyzer +from profiler.advisor.result.result import OptimizeResult +from 
profiler.advisor.dataset.graph_dataset import GraphDataset +from profiler.advisor.analyzer.graph_fusion.graph_fusion_checker import GraphFusionRules +from profiler.advisor.dataset.profiling.profiling_dataset import ProfilingDataset +from profiler.advisor.display.html.render import HTMLRender + + +class FusionOPAnalyzer(BaseAnalyzer): + """ + fusion optimizer + """ + RULES = dict(graph_dataset=partial(GraphFusionRules, "rules/op_fusion_pass.yaml")) + dataset_cls_list = [GraphDataset, ProfilingDataset] + + def __init__(self, collection_path, **kwargs) -> None: + super(FusionOPAnalyzer, self).__init__(collection_path, **kwargs) + self.result = OptimizeResult() + self.html_render = HTMLRender() + + @BaseAnalyzer.check_data((GraphDataset.get_key(),)) + def optimize(self): + """ + :return: result + """ + self._check(self.dataset_list.get("GraphDataset"), self.dataset_list.get("ProfilingDataset")) + return self.result + + def _check(self, graph_data: List[GraphDataset], + profiling_data: List[ProfilingDataset] = None) -> None: + for _, rule in self.RULES.items(): + checker = rule() + if profiling_data is None: + checker.find_fusion_matched_issues(graph_data) + else: + checker.find_fusion_matched_issues_with_times(graph_data, profiling_data) + checker.make_record(self.result) + checker.make_render(self.html_render) + + def make_record(self): + pass + + def make_render(self): + pass diff --git a/profiler/advisor/analyzer/graph_fusion/graph_fusion_checker.py b/profiler/advisor/analyzer/graph_fusion/graph_fusion_checker.py new file mode 100644 index 0000000000..e64020fdfe --- /dev/null +++ b/profiler/advisor/analyzer/graph_fusion/graph_fusion_checker.py @@ -0,0 +1,207 @@ +import logging +from typing import List + +from tqdm import tqdm + +from profiler.advisor.result.result import OptimizeResult +from profiler.advisor.result.item import OptimizeItem, OptimizeRecord, StatisticsItem +from profiler.advisor.common.graph.graph import Graph +from profiler.advisor.common.graph.graph_parser import QueryGraphParser +from profiler.advisor.dataset.graph_dataset import GraphDataset +from profiler.advisor.common.graph.graph_match import find_isomorphisms + +logger = logging.getLogger() + + +class GraphFusionRules: + def __init__(self, fusion_rules: str): + self.fusion_rules = fusion_rules + self.candidates = [] + self.task_duration_list = [] + + @staticmethod + def build_query_graph(query_graphs) -> List[Graph]: + for _, query_graph in query_graphs.fusion_rules.items(): + for sub_graph in query_graph: + graph = Graph(*sub_graph) + graph.build() + yield graph + + def find_fusion_matched_issues(self, graphs: List[GraphDataset]): + query_graphs = QueryGraphParser(self.fusion_rules) + with tqdm(total=query_graphs.num_rules, leave=False, ncols=100, unit=" rules") as pbar: + pbar.set_description(f"Searching Isomorphic Subgraph") + for query_graph in self.build_query_graph(query_graphs): + query_candidates = find_isomorphisms(query_graph.graph, graphs[0].graphs[-1].graph) + pbar.update(1) + if len(query_candidates) > 0: + self.candidates.append(query_candidates) + + def find_fusion_matched_issues_with_times(self, graphs: List[GraphDataset], profiling): + self.find_fusion_matched_issues(graphs) + if len(self.candidates) == 0 or len(profiling) == 0: + return + + if not hasattr(profiling[0], 'op_summary') or profiling[0].op_summary is None: + if hasattr(profiling[0], 'msprof'): + self.match_time_from_msprof(profiling[0].msprof) + return + else: + logger.warning("Skip analyze operator because of not containing op summary.") 
+ return + + self.match_time_from_summary(profiling[0].op_summary) + time_duration_sum = [] + for task_duration in self.task_duration_list: + time_duration_sum.append(sum([sum(duration) for duration in task_duration])) + time_duration_index = sorted(range(len(time_duration_sum)), + key=time_duration_sum.__getitem__, + reverse=True) + self.task_duration_list = [self.task_duration_list[i] for i in time_duration_index] + self.candidates = [self.candidates[i] for i in time_duration_index] + + def match_time_from_summary(self, op_summary): + op_dict = op_summary.task_dict + for candidates in self.candidates: + candidate_duration = [] + for candidate in candidates: + duration_list = [] + for node in candidate.values(): + if node.op_name not in op_dict or op_dict[node.op_name][0].op_type.lower() != node.op_type.lower(): + logger.warning("Operator %s is missing in op summary, which will be set to 0.", node.op_name) + duration_list.append(0.0) + continue + duration_list.append(float(op_dict[node.op_name][0].task_duration)) + candidate_duration.append(duration_list) + self.task_duration_list.append(candidate_duration) + + def match_time_from_msprof(self, msprof): + op_dict = dict() + for task in msprof.tasks: + if "item_id" not in task.args: + continue + op_dict[task.args["item_id"]] = {"task_duration": task.dur} + for candidates in self.candidates: + candidate_duration = [] + for candidate in candidates: + duration_list = [] + for node in candidate.values(): + if node.op_name not in op_dict: + logger.warning("Operator %s is missing in msprof, which will be set to 0.", node.op_name) + duration_list.append(0.0) + continue + duration_list.append(float(op_dict[node.op_name].get("task_duration"))) + candidate_duration.append(duration_list) + self.task_duration_list.append(candidate_duration) + + def make_render(self, html_render): + if not self.candidates: + return + + candidates_list = [] + for case_id, nodes in enumerate(self.candidates): + candidate_dict = dict() + candidate_dict['counts'] = len(nodes) + candidate_dict['matches'] = [] + has_time_info = False + if self.task_duration_list: + has_time_info = True + candidate_dict['total_duration'] = round(sum(sum(duration) for duration in + self.task_duration_list[case_id]), 2) + for node_index, refer_node in enumerate(nodes): + match = [] + index = 0 + pass_name = ','.join(item.op_type for item in refer_node.keys()) + for query_node, host_node in refer_node.items(): + fusion_pattern = query_node.op_pass + + if 'op_pass' not in candidate_dict: + candidate_dict['op_pass'] = fusion_pattern + if 'fusion_pattern' not in candidate_dict: + candidate_dict['fusion_pattern'] = pass_name + match_attr = dict() + match_attr['op_name'] = host_node.op_name + match_attr['dtype'] = query_node.op_type + if has_time_info: + match_attr['duration'] = round(self.task_duration_list[case_id][node_index][index], 2) + index += 1 + match.append(match_attr) + match_attr = dict() + match_attr['op_name'] = "-" + match_attr['dtype'] = "-" + if has_time_info: + match_attr['duration'] = round(sum(self.task_duration_list[case_id][node_index]), 2) + match.append(match_attr) + candidate_dict['matches'].append(match) + candidates_list.append(candidate_dict) + html_render.render_template(key="computation", + template_dir="templates", + template_name="fusion.html", + candidates=candidates_list) + + def make_record(self, result: OptimizeResult): + """ + make record for what and how to optimize + """ + if not self.candidates: + return + + optimization_item = OptimizeItem( + "fusion issue", + 
f"Found {len(self.candidates)} fusion issues", + ["Check fusion issues detail in att_advisor*.html"] + ) + total_time = 0.0 + for candidate in self.task_duration_list: + for duration in candidate: + total_time += sum(duration) + statistics_item = StatisticsItem(0, + total_time, + sum([len(candidate) for candidate in self.candidates]) + ) + result.add(OptimizeRecord(optimization_item, statistics_item)) + + record_title = [ + "issue_id", "graph_name", "op_name", "fusion_structure", "fusion_pattern", + "op_type", "input_shape", "input_format", + "input_dtype", "output_shape", "output_format", "output_dtype" + ] + result.add_detail('fusion issues', headers=record_title) + + for case_id, nodes in enumerate(self.candidates): + for _, refer_node in enumerate(nodes): + pass_name = ','.join(item.op_type for item in refer_node.keys()) + for query_node, host_node in refer_node.items(): + fusion_pattern = query_node.op_pass + detail = [ + case_id, + host_node.graph_name, + host_node.op_name, + pass_name, + fusion_pattern, + query_node.op_type, + self.get_attr_shape(host_node, "input", "shape"), + self.get_attr_type(host_node, "input", "format"), + self.get_attr_type(host_node, "input", "dtype"), + self.get_attr_shape(host_node, "output", "shape"), + self.get_attr_type(host_node, "output", "format"), + self.get_attr_type(host_node, "output", "dtype"), + ] + result.add_detail('fusion issues', detail=detail) + + @staticmethod + def get_attr_shape(node, type_name: str, attr_name: str) -> str: + attr_shape = [] + node_attrs = getattr(node, type_name, []) + for attrs in node_attrs: + attr = getattr(attrs, attr_name, []) + attr_shape.append(",".join(attr)) + return ";".join(attr_shape) + + @staticmethod + def get_attr_type(node, type_name: str, attr_name: str) -> str: + attr_type = [] + node_attrs = getattr(node, type_name, []) + for attr in node_attrs: + attr_type.append(getattr(attr, attr_name, "")) + return ";".join(attr_type) diff --git a/profiler/advisor/common/analyzer_scopes.py b/profiler/advisor/common/analyzer_scopes.py index 03d7759a72..0c6a2ac260 100644 --- a/profiler/advisor/common/analyzer_scopes.py +++ b/profiler/advisor/common/analyzer_scopes.py @@ -3,6 +3,7 @@ class SupportedScopes: # used for specify fourth-level commands and define the key of the result dict # the key defined bellow must be the same as value TIMELINE_FUSION_OPS = "timeline_fusion_ops" + GRAPH = "graph" SLOW_RANK = "slow_rank" SLOW_LINK = "slow_link" PORFILING_OPERATOR_ANALYSIS = "profiling_operator_analysis" diff --git a/profiler/advisor/common/graph/__init__.py b/profiler/advisor/common/graph/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/profiler/advisor/common/graph/graph.py b/profiler/advisor/common/graph/graph.py new file mode 100644 index 0000000000..6bab2042de --- /dev/null +++ b/profiler/advisor/common/graph/graph.py @@ -0,0 +1,135 @@ +import logging +from typing import Dict, List, Tuple, Callable, Any, Optional, Union + +import networkx as nx + +from profiler.advisor.common.graph.graph_parser import HostGraphNode, QueryGraphNode + +logger = logging.getLogger() + + +class Graph: + """ + Graph Struct + """ + + # pylint: disable=too-many-instance-attributes + def __init__(self, + nodes: Dict[str, Optional[Union[HostGraphNode, QueryGraphNode]]] = None, + edges: List[Tuple[Optional[Union[HostGraphNode, QueryGraphNode]], + Optional[Union[HostGraphNode, QueryGraphNode]]]] = None, + name: str = None): + self.name = name + self.graph = nx.DiGraph(name=name) + self.nodes = nodes if nodes is not 
None else {} + self.edges = edges if edges is not None else list() + + def build(self): + for op_name, node in self.nodes.items(): + # add node and mark op_name as tag + self.add_node(node, + op_type=node.op_type + ) + for edge in self.edges: + self.add_edge(*edge) + return self.graph + + def get_size(self) -> Dict[str, int]: + if not hasattr(self.graph, "nodes"): + return {"edges": 0, "nodes": 0} + + return {"edges": len(self.graph.edges), + "nodes": len(self.graph.nodes)} + + def add_node(self, node: HostGraphNode, **kwargs): + if node is None: + return + self.graph.add_node(node, **kwargs) + + def add_edge(self, pre_node: HostGraphNode, next_node: HostGraphNode): + if pre_node is None or next_node is None: + return + + if pre_node not in self.graph or \ + next_node not in self.graph: + logging.error("Nodes between edge should be both exists.") + return + + self.graph.add_edge(pre_node, next_node) + + def add_node_with_edge(self, node, adj_nodes: List[HostGraphNode]): + self.add_node(node) + for adj in adj_nodes: + self.add_edge(node, adj) + + def remove_node(self, node: HostGraphNode = None) -> None: + if node is None: + return + + self.graph.remove_node(node) + + def remove_edge(self, pre_node: HostGraphNode = None, next_node: HostGraphNode = None) -> None: + if pre_node is None or next_node is None: + raise ValueError(f"Invalid edge from {pre_node} to {pre_node}.") + + self.remove_edge(pre_node, next_node) + + def get_subgraph(self, nodes: List[HostGraphNode]) -> nx.DiGraph: + nodes = list(set(nodes)) + for node in nodes: + if not self.is_node_exists(node): + raise ValueError(f"Failed to subtract subgraph because {node.op_name} is not in the graph.") + + return self.graph.subgraph(nodes) + + def highlight_subgraph(self, subgraph: nx.DiGraph = None) -> None: + pass + + def get_node(self, node: HostGraphNode): + if node not in self.graph: + return + + return self.graph[node] + + def get_node_by_name(self, node_name: str): + return self.nodes.get(node_name, None) + + def is_node_exists(self, node: HostGraphNode): + return node in self.graph + + def draw(self, + graph: nx.DiGraph = None, + with_labels: bool = False, + labels: Dict[HostGraphNode, Any] = None, + pos_func: Callable = None, + font_weight: str = "bold", + savefig: bool = False, + node_size: int = 50, + **kwargs + ): + try: + import matplotlib.pylab as plt + except ImportError: + logger.error('Please install matplotlib first by using `pip install matplotlib`.') + return + + if graph is None: + graph = self.graph + + pos = pos_func(graph) if pos_func is not None else None + + if with_labels: + if labels is None: + labels = {k: f"{k}\n({v['op_name']})" for k, v in graph.nodes.items()} + + nx.draw(graph, + with_labels=with_labels, + pos=pos, + node_size=node_size, + font_weight=font_weight, + labels=labels, + **kwargs + ) + if savefig: + plt.savefig(self.name + ".png") + plt.show() diff --git a/profiler/advisor/common/graph/graph_match.py b/profiler/advisor/common/graph/graph_match.py new file mode 100644 index 0000000000..d0dfc16295 --- /dev/null +++ b/profiler/advisor/common/graph/graph_match.py @@ -0,0 +1,355 @@ +import itertools +import logging +from functools import lru_cache +from collections import deque +from typing import Dict, Generator, List, Callable, Hashable, Tuple + +import networkx as nx + + +@lru_cache() +def match_node_attr_fun(query_node: Hashable, + host_node: Hashable, + query_graph: nx.Graph, + host_graph: nx.Graph + ) -> bool: + """ + Check query node matches the attributes in host graph + + :param 
query_node: Query graph node + :param host_node: Host graph node + :param query_graph: Query Graph + :param host_graph: Host graph + :return: bool, match or not + """ + # get node attr + if query_node not in query_graph.nodes or host_node not in host_graph.nodes: + return False + + query_node = query_graph.nodes[query_node] + host_node = host_graph.nodes[host_node] + for attr, val in query_node.items(): + if attr not in host_node: + return False + if isinstance(host_node[attr], str) and isinstance(val, str): + if host_node[attr].lower() != val.lower(): + return False + else: + if host_node[attr] != val: + return False + return True + + +@lru_cache() +def match_node_struct_fun(query_node: Hashable, + host_node: Hashable, + query_graph: nx.Graph, + host_graph: nx.Graph + ) -> bool: + """ + Check query node matches the structure in host graph + + :param query_node: Query graph node + :param host_node: Host graph node + :param query_graph: Query Graph + :param host_graph: Host graph + :return: bool, match or not + """ + if query_node not in query_graph.nodes or host_node not in host_graph.nodes: + return False + + return host_graph.degree(host_node) >= query_graph.degree(query_node) + + +@lru_cache() +def match_edge_attr_fun(query_edge: Tuple[Hashable, Hashable], + host_edge: Tuple[Hashable, Hashable], + query_graph: nx.Graph, + host_graph: nx.Graph + ) -> bool: + """ + Check query edge matches the attr in host graph + + :param query_edge: Query graph edge + :param host_edge: Host graph edge + :param query_graph: Query Graph + :param host_graph: Host graph + :return: bool, match or not + """ + # get edge attr + if query_edge not in query_graph.edges or host_edge not in host_graph.edges: + return False + + query_edge = query_graph.edges[query_edge] + host_edge = host_graph.edges[host_edge] + for attr, val in query_edge.items(): + if attr not in host_edge: + return False + if isinstance(host_edge[attr], str) and isinstance(val, str): + if host_edge[attr].lower() != val.lower(): + return False + else: + if host_edge[attr] != val: + return False + return True + + +def find_isomorphisms(query_graph: nx.Graph, + host_graph: nx.Graph, + *args, + _node_attr_fun: Callable = match_node_attr_fun, + _node_struct_fun: Callable = match_node_struct_fun, + _edge_attr_fun: Callable = match_edge_attr_fun, + limit: int = None, + **kwargs) -> List[Dict[Hashable, Hashable]]: + """ + Find all the sub graphs that are isomorphic to query_graph in host_graph . + + :param query_graph: The graph object to query + :param host_graph: The graph object to be queried + :param args: Position args + :param _node_attr_fun: The function to match node attr + :param _node_struct_fun: The function to match node structural + :param _edge_attr_fun: The function to match edge attr + :param limit: The limitation for the number of returned mappings + :param kwargs: Keyword args + :return: Matched node mapping list + ``` + [{query_id: host_id, ...}, ...] 
+ ``` + """ + candidates = [] + for query_result in find_isomorphisms_iter( + query_graph, + host_graph, + *args, + _node_attr_fun=_node_attr_fun, + _node_struct_fun=_node_struct_fun, + _edge_attr_fun=_edge_attr_fun, + **kwargs + ): + candidates.append(query_result) + if limit and len(candidates) >= limit: + return candidates + return candidates + + +def find_isomorphisms_iter(query_graph: nx.Graph, + host_graph: nx.Graph, + directed: bool = None, + _node_attr_fun: Callable = None, + _node_struct_fun: Callable = None, + _edge_attr_fun: Callable = None, + ) -> Generator[Dict[Hashable, Hashable], None, None]: + """ + A generation to find one isomorphic subgraph in host_graph for query_graph. + + :param query_graph: The graph object to query + :param host_graph: The graph object to be queried + :param directed: Whether direction should be considered during search + :param _node_attr_fun: The function to match node attr + :param _node_struct_fun: The function to match node structural + :param _edge_attr_fun: The function to match edge attr + :return: Yield mappings from query node IDs to host graph IDs: {query_id: host_id, ...} + + """ + if directed is None: + # query graph and host graph should consider directions. + if isinstance(query_graph, nx.DiGraph) and \ + isinstance(host_graph, nx.DiGraph): + directed = True + else: + directed = False + + # Initialize queue + dq = deque() + dq.appendleft({}) + + while len(dq) > 0: + backbone = dq.pop() + next_candidate_backbones = get_next_candidates(backbone=backbone, + query_graph=query_graph, + host_graph=host_graph, + directed=directed, + _node_attr_fun=_node_attr_fun, + _node_struct_fun=_node_struct_fun, + _edge_attr_fun=_edge_attr_fun, + ) + for candidate in next_candidate_backbones: + # find a legal isomorphism + if len(candidate) == len(query_graph): + yield candidate + else: + # continue to search + dq.appendleft(candidate) + + +def get_next_candidates( + backbone: Dict, + query_graph: nx.Graph, # noqa + host_graph: nx.Graph, # noqa + next_node: Hashable = None, + directed: bool = True, # noqa + _node_attr_fun: Callable = None, # noqa + _node_struct_fun: Callable = None, # noqa + _edge_attr_fun: Callable = None # noqa +) -> List[Dict[Hashable, Hashable]]: + """ + Get a list of candidate node assignments for the next "step" of this map. 
+
+    :param backbone: Mapping of query node IDs to one set of host graph IDs
+    :param next_node: Optional suggestion for the next node to assign
+    :return: List[Dict[Hashable, Hashable]]: A new list of node mappings with one additional element mapped
+    """
+    node_priority = {n: 1 for n in query_graph.nodes}
+    candidate_nodes = []
+
+    if next_node is None and len(backbone) == 0:
+        # Start case: pick a first query node and try every host node as its image
+        next_node = max(node_priority.keys(),
+                        key=lambda x: node_priority.get(x, 0))
+
+        for node in host_graph.nodes:
+            if _node_attr_fun(next_node, node, query_graph, host_graph) and \
+                    _node_struct_fun(next_node, node, query_graph, host_graph):
+                candidate_nodes.append({next_node: node})
+        return candidate_nodes
+
+    nodes_with_maximum_backbone = []
+    for query_node_id in query_graph.nodes:
+        if query_node_id in backbone:
+            continue
+
+        backbone_neighbors = []
+        if not directed:
+            backbone_neighbors = query_graph.adj[query_node_id]
+        else:
+            # nx.DiGraph.pred: A <- B: find previous node from B to A
+            # nx.DiGraph.adj: A -> B: find next node from A to B
+            backbone_neighbors = list(set(query_graph.adj[query_node_id]).union(set(query_graph.pred[query_node_id])))
+
+        query_backbone_node_count = sum([1 for _node in backbone_neighbors if _node in backbone])
+        if query_backbone_node_count > 0:
+            # this query node is adjacent to the current backbone
+            nodes_with_maximum_backbone.append(query_node_id)
+
+    # next_node is connected to the current backbone.
+    next_node = max(nodes_with_maximum_backbone, key=lambda x: node_priority.get(x, 0))
+
+    # verify that all edges between `next_node` and nodes in the backbone exist in the host graph
+    # Step1: find all edges between `next_node` and nodes in the backbone
+    next_edge_edges = []
+    for _node in query_graph.adj[next_node]:
+        if _node in backbone:
+            # `next_node` -> `_node`
+            next_edge_edges.append((None, next_node, _node))
+
+    if directed:
+        for _node in query_graph.pred[next_node]:
+            if _node in backbone:
+                # `_node` -> `next_node`
+                next_edge_edges.append((_node, next_node, None))
+
+    if len(next_edge_edges) == 0:
+        logging.warning("Found a query node without any edge to the backbone, which is invalid.")
+        return []
+    # Step2: collect candidate host nodes that have such edges in the host graph
+    candidate_nodes = []
+    if len(next_edge_edges) == 1:
+        source, _, target = next_edge_edges[0]
+        if not directed:
+            candidate_nodes = list(host_graph.adj[backbone[target]])
+        else:
+            if source is not None:
+                # the backbone node `source` points to `next_node`, so search its successors
+                candidate_nodes = list(host_graph.adj[backbone[source]])
+            elif target is not None:
+                # `next_node` points to the backbone node `target`, so search its predecessors
+                candidate_nodes = list(host_graph.pred[backbone[target]])
+
+    elif len(next_edge_edges) > 1:
+        candidate_nodes_set = set()
+        for (source, _, target) in next_edge_edges:
+            if not directed:
+                candidate_nodes_from_this_edge = host_graph.adj[backbone[target]]
+            else:
+                if source is not None:
+                    candidate_nodes_from_this_edge = host_graph.adj[backbone[source]]
+                else:  # target is not None
+                    candidate_nodes_from_this_edge = host_graph.pred[backbone[target]]
+
+            if len(candidate_nodes_set) > 0:
+                candidate_nodes_set = candidate_nodes_set.intersection(candidate_nodes_from_this_edge)
+            else:
+                # initialize candidate_nodes_set
+                candidate_nodes_set.update(candidate_nodes_from_this_edge)
+        candidate_nodes = list(candidate_nodes_set)
+
+    tentative_results = []
+    for _node in candidate_nodes:
+        if all([_node not in backbone.values(),
+                _node_attr_fun(next_node, _node, query_graph, host_graph),
+                _node_struct_fun(next_node, _node, query_graph, host_graph)]
+               ):
tentative_results.append({**backbone, + next_node: _node}) + + final_candidates = check_edges_mapping(tentative_results, + query_graph=query_graph, + host_graph=host_graph, + _edge_attr_fun=_edge_attr_fun) + return final_candidates + + +def check_edges_mapping(candidates: List[Dict[Hashable, Hashable]], + query_graph: nx.Graph, + host_graph: nx.Graph, + _edge_attr_fun: Callable = None + ) -> List[Dict[Hashable, Hashable]]: + """ + Check that all edges between the assigned nodes exist in the host graph. + + :param candidates: mapping nodes candidates + :param query_graph: The graph object to query + :param host_graph: The graph object to be queried + :param _edge_attr_fun: The function to match edge attr + :return: + """ + monomorphism_candidates = [] + + for candidate in candidates: + if len(candidate) != len(query_graph): + monomorphism_candidates.append(candidate) + continue + + all_pass_flag = True + for edge_start, edge_end in query_graph.edges: + # check edge in host graph + if not host_graph.has_edge(candidate[edge_start], candidate[edge_end]): + all_pass_flag = False + break + + # check edge attr + if _edge_attr_fun is None or not _edge_attr_fun( + (edge_start, edge_end), + (candidate[edge_start], candidate[edge_end]), + query_graph, + host_graph + ): + all_pass_flag = False + break + + if all_pass_flag: + monomorphism_candidates.append(candidate) + + # Isomorphisms check + final_candidates = [] + for candidate in monomorphism_candidates: + all_product = itertools.product(candidate.keys(), candidate.keys()) + for edge_start, edge_end in all_product: + if not query_graph.has_edge(edge_start, edge_end) and \ + host_graph.has_edge(candidate[edge_start], candidate[edge_end]): + break + else: + final_candidates.append(candidate) + return final_candidates diff --git a/profiler/advisor/common/graph/graph_parser.py b/profiler/advisor/common/graph/graph_parser.py new file mode 100644 index 0000000000..d4c67fc191 --- /dev/null +++ b/profiler/advisor/common/graph/graph_parser.py @@ -0,0 +1,413 @@ +import os +import logging +import yaml +import itertools +from collections import deque +from dataclasses import dataclass +from typing import List, Tuple, Dict + +logger = logging.getLogger() + + +@dataclass +class Tensor: + def __init__(self): + super().__init__() + self.shape = [] + self.origin_shape = [] + self.shape_range = [] + self.origin_shape_range = [] + self.dtype = "" + self.origin_data_type = "" + self.format = "" + self.origin_format = [] + + +@dataclass +class Attr: + + def __init__(self): + super().__init__() + self.key = str() + self.value = [] + + +class HostGraphNode: + def __init__(self): + super().__init__() + self.graph_name = str() + self.op_name = str() + self.op_type = str() + self.inputs = [] + self.input = [] + self.outputs = [] + self.output = [] + self.strides = [] + self.pads = [] + self.groups = "" + self.dilations = [] + self.kernelname = "" + self._attrs = [] + + def __repr__(self): + return f"" + + +@dataclass +class HostGraph: + def __init__(self): + super().__init__() + self.name = "" + self.nodes = {} + self.inputs = [] + self.edges = [] + self.model_name = None + self.file_path = None + + def build(self): + """build a graph""" + for name, node in self.nodes.items(): + for input_node in node.inputs: + if input_node not in self.nodes: + continue + self.nodes[input_node].outputs.append(name) + + +class HostGraphParser: + """ + Parse graph metadata from text file + """ + def __init__(self, file_path): + self.buffer = deque(maxlen=100) + self.line_no = 0 + 
self._file_path = file_path + self.edges: List[Tuple[HostGraphNode, HostGraphNode]] = [] + self.nodes: Dict[str, HostGraphNode] = {} + self.graphs = self._parse(self._file_path) + self._get_node_dict() + self._get_edges_list() + del self.graphs[0] + + @staticmethod + def _get_key_value( line): + res = line.split(':', 1) + return res[0].strip(), res[1].strip().strip('"') + + @staticmethod + def _parse_attr(key, value, obj): + if not isinstance(obj, list) and not obj: + return + if key == "dim" and hasattr(obj, "shape"): + obj.shape.append(value) + elif key == "name" and hasattr(obj, "op_name"): + obj.op_name = value + elif key == "name" and hasattr(obj, "name"): + obj.name = value + elif key == "dtype" and hasattr(obj, "dtype"): + obj.dtype = value + elif key == "layout" and hasattr(obj, "format"): + obj.format = value + elif key == "type" and hasattr(obj, "op_type"): + obj.op_type = value + elif key == "input" and hasattr(obj, "input"): + obj.inputs.append(value.strip('"').split(':')[0]) + elif key == "key" and hasattr(obj, "key"): + obj.key = value + elif hasattr(obj, key): + setattr(obj, key, value) + elif isinstance(obj, list) and key != "val_type": + obj.append(value) + + def _parse_struct(self, in_file, key, in_obj): + + def parse_shape(file, obj): + obj = self._parse_line(file, obj) + + def parse_input_desc(file, obj): + tensor = self._parse_line(file, Tensor()) + if obj and hasattr(obj, "input"): + obj.input.append(tensor) + + def parse_out_desc(file, obj): + tensor = self._parse_line(file, Tensor()) + if obj and hasattr(obj, "output"): + obj.output.append(tensor) + + def parse_op(file, obj: HostGraph): + node = self._parse_line(file, HostGraphNode()) + if hasattr(obj, "name"): + node.graph_name = obj.name + if obj and hasattr(obj, "nodes") and node.op_name: + obj.nodes[node.op_name] = node + + def parse_graph(file, obj): + graph = self._parse_line(file, HostGraph()) + obj.append(graph) + + def parse_attr(file, obj): + attr = self._parse_line(file, Attr()) + if hasattr(obj, attr.key): + if attr.key not in ['format']: + setattr(obj, attr.key, attr.value) + elif attr.key.endswith("_kernelname"): + setattr(obj, "kernelname", attr.value) + if obj and hasattr(obj, "get_attrs"): + obj.get_attrs().append(attr) + + def parse_list(file, obj): + value = [] + self._parse_line(file, value) + if isinstance(obj, list): + obj.append(value) + else: + obj = value + + def parse_value(file, obj): + if hasattr(obj, "value"): + obj.value = self._parse_line(file, obj.value) + + def parse_default(file, _obj=None): + """function with unused argument""" + self._parse_line(file, None) + + parse_methods = { + "shape": parse_shape, + "input_desc": parse_input_desc, + "output_desc": parse_out_desc, + "op": parse_op, + "graph": parse_graph, + "attr": parse_attr, + "list_list_int": parse_list, + "list_list_i": parse_list, + "list": parse_list, + "value": parse_value, + } + parse_methods.get(key, parse_default)(in_file, in_obj) + + def _read_line(self, file): + self.line_no += 1 + line = file.readline() + if line.strip().endswith('}'): + end_line = "" + while self.buffer and not end_line.strip().endswith("{"): + end_line = self.buffer.pop() + else: + self.buffer.append(line) + return line.strip() + + def _parse_line(self, file, obj=None): + line = self._read_line(file) + try: + while line and not line.endswith("}"): + if line.endswith('{'): + key = line.rstrip('{').strip() + self._parse_struct(file, key, obj) + else: + key, value = self._get_key_value(line) + self._parse_attr(key, value, obj) + line = 
self._read_line(file) + except Exception as exception: + if self.buffer: + logger.debug("***********************graph content**************************") + while self.buffer: + line = self.buffer.popleft() + logger.debug(line) + logger.debug("***********************graph content**************************") + raise exception + return obj + + def _parse(self, graph_file): + # pylint:disable=broad-except + graph_list = [] + with open(graph_file, "r", encoding="gbk") as file: + try: + graph_list = self._parse_line(file, graph_list) + except Exception: + logger.error( + "Parse line %s of file %s failed, make sure the format is correct.", self.line_no, graph_file + ) + graphs = [] + for graph in graph_list: + if isinstance(graph, HostGraph): + graphs.append(graph) + for graph in graphs: + graph.model_name = graphs[0].name + graph.file_path = self._file_path + graph.build() + return graphs + + def _get_edges_list(self) -> None: + if len(self.graphs) <= 0: + return + + def is_repeat_edge(edge, edge_collector): + for _edge in edge_collector: + if edge[0].op_name == _edge[0].op_name and edge[1].op_name == _edge[1].op_name: + return True + return False + + for node in self.nodes.values(): + for input_node_name in node.inputs: + if input_node_name not in self.nodes: + continue + input_node = self.nodes[input_node_name] + if not is_repeat_edge((input_node, node), self.edges): + self.edges.append((input_node, node)) + for output_node_name in node.outputs: + if output_node_name not in self.nodes: + continue + output_node = self.nodes[output_node_name] + if not is_repeat_edge((node, output_node), self.edges): + self.edges.append((node, output_node)) + + def _get_node_dict(self) -> None: + if not self.graphs: + self.nodes = {} + return + self.nodes = {node.op_name: node for graph in self.graphs for node in graph.nodes.values()} + + +class QueryGraphNode: + """ + Graph Node + """ + _ID = 0 + + def __init__(self, op_type: str, op_pass: str): + self._op_type = op_type + self._id = QueryGraphNode._ID + self._op_pass = op_pass + QueryGraphNode._ID += 1 + + def get_property(self, name): + """ + get property + """ + return getattr(self, name, lambda: None) + + @property + def op_type(self): + return self._op_type + + @property + def op_name(self): + return self._op_type + "_id_" + str(self._id) + + @property + def op_pass(self): + return self._op_pass + + @op_type.setter + def op_type(self, op_type): + self._op_type = op_type + + def __eq__(self, other): + return self._op_type == other._op_type and \ + self._id == other._id + + def __hash__(self): + return hash(self._op_type + str(self._id)) + + @staticmethod + def trim_string(string: str, length: int = -1): + """ + + Trim string to target length + :param string: Original string + :param length: Target length of string, -1 indicates original string. 
+ :return: Trimmed string + """ + if string is None or not isinstance(string, str): + raise TypeError(f"Param string must be a string type but got {type(string)}.") + + if length <= -1 or len(string) <= length: + return string + + return string[:length] + + +class QueryGraphParser: + def __init__(self, rule_database_path: str): + self._fusion_rules: Dict[str, List[Tuple]] = dict() + self.load_database(rule_database_path) + self.num_rules = sum([len(v) for v in self._fusion_rules.values()]) + + @property + def fusion_rules(self): + return self._fusion_rules + + def load_database(self, rule_database): + if not os.path.isabs(rule_database): + rule_database = os.path.join(os.path.dirname(__file__), + "../", "../", + rule_database) + + if not os.path.exists(rule_database): + raise FileNotFoundError(f"Path {rule_database} does not exist.") + with open(rule_database, 'r') as f: + database = yaml.safe_load(f) + self.parse_yaml(database) + + def parse_yaml(self, yaml_database): + fusion_strategy_list = yaml_database.get("GraphFusion", []) + if yaml_database.get("UBFusion", []): + fusion_strategy_list.extend(yaml_database.get("UBFusion", [])) + for fusion_strategy in fusion_strategy_list: + if not isinstance(fusion_strategy, dict): + continue + (fusion_name, strategy), = fusion_strategy.items() + version = strategy.get("version", 0) + if version == 0 or version == "0": + self._fusion_rules[fusion_name] = self.build_query_graph_v0(fusion_name, + strategy.get('struct', [])) + elif version == 1 or version == "1": + self._fusion_rules[fusion_name] = self.build_query_graph_v1(fusion_name, + strategy.get('nodes', []), + strategy.get('edges', [])) + + @staticmethod + def build_query_graph_v0(graph_name: str, graph_struct: List[str]) -> List[Tuple]: + nodes = dict() + graphs = [] + edges = [] + + pre_node, next_node = None, None + for node in graph_struct: + pre_node = next_node + next_node = QueryGraphNode(node, graph_name) + nodes[next_node.op_name] = next_node + if pre_node is None or next_node is None: + continue + edges.append((pre_node, next_node,)) + graphs.append((nodes, edges, graph_name,)) + return graphs + + @staticmethod + def build_query_graph_v1(graph_name: str, + nodes_list: List[Dict], + edges_list: List[List[str]]) -> List[Tuple]: + graphs = [] + node_index = dict() + multi_node_list = [] + for index, node in enumerate(nodes_list): + (node_name, op_type), = node.items() + if isinstance(op_type, str): + op_type = [op_type] + multi_node_list.append([QueryGraphNode(op, graph_name) for op in op_type]) + node_index[node_name] = index + + multi_node = list(itertools.product(*multi_node_list)) + + for index, sub_nodes in enumerate(multi_node): + sub_graph_name = graph_name if index == 0 else f"{graph_name}#{index}" + sub_edge = [] + sub_node = dict() + for node in sub_nodes: + sub_node[node.op_name] = node + for edge in edges_list: + pre_node, next_node = edge + pre_node_index, next_node_index = node_index.get(pre_node), node_index.get(next_node) + sub_edge.append((sub_nodes[pre_node_index], sub_nodes[next_node_index])) + sub_graph = (sub_node, sub_edge, sub_graph_name,) + graphs.append(sub_graph) + return graphs diff --git a/profiler/advisor/dataset/__init__.py b/profiler/advisor/dataset/__init__.py index 9fac2c8eb3..e69de29bb2 100644 --- a/profiler/advisor/dataset/__init__.py +++ b/profiler/advisor/dataset/__init__.py @@ -1,6 +0,0 @@ -# import asight # noqa -# import asight.datasets.graph_dataset -# -# from .graph_dataset import GraphDataset as GraphD -# -# 
asight.datasets.graph_dataset.GraphDataset = GraphD diff --git a/profiler/advisor/dataset/graph_dataset.py b/profiler/advisor/dataset/graph_dataset.py new file mode 100644 index 0000000000..c6dd0448b4 --- /dev/null +++ b/profiler/advisor/dataset/graph_dataset.py @@ -0,0 +1,53 @@ +import logging +from typing import List + +from profiler.advisor.dataset.dataset import Dataset +from profiler.advisor.common.graph.graph_parser import HostGraphParser +from profiler.advisor.common.graph.graph import Graph +from profiler.advisor.utils.utils import load_parameter, lazy_property, get_file_path_from_directory + +logger = logging.getLogger() + + +class GraphDataset(Dataset): + """ + data directory dataset + """ + FILE_PATTERN = "ATT_ADVISOR_GRAPH_FILE" + + def __init__(self, collection_path, data: dict = None, **kwargs) -> None: + self.graph_files: List[HostGraphParser] = [] + super().__init__(collection_path, data) + + def _parse(self): + graph_list = get_file_path_from_directory(self.collection_path, + lambda file: file.endswith( + load_parameter(self.FILE_PATTERN, "_Build.txt"))) + + for graph_file_path in graph_list[-1:]: + logger.info("Prepare to parse %s as default graph.", graph_file_path) + graph_file = HostGraphParser(graph_file_path) + self.graph_files.append(graph_file) + return self.graph_files + + @lazy_property + def graphs(self) -> List[Graph]: + """ + get a list of graphs + return: List[Graph] + """ + graphs = [] + for parser in self.graph_files: + graph = Graph(nodes=parser.nodes, + edges=parser.edges, + name="Default") + graph.build() + graphs.append(graph) + graphs.sort(key=lambda g: g.name) + del self.graph_files[0] # remove previous useless data + return graphs + + @property + def is_empty(self) -> bool: + """check empty graph dataset""" + return len(self.graphs()) == 0 diff --git a/profiler/advisor/display/html/templates/fusion.html b/profiler/advisor/display/html/templates/fusion.html new file mode 100644 index 0000000000..605a9d748f --- /dev/null +++ b/profiler/advisor/display/html/templates/fusion.html @@ -0,0 +1,47 @@ +{% if candidates|length > 0 %} +
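+{# `candidates` is the context built by the analyzer's make_render: one dict per matched
+   fusion pattern, with keys op_pass, fusion_pattern, counts, total_duration (present only
+   when timing data is available) and matches, where each match is a list of rows exposing
+   op_name, dtype and duration. #}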
+<div class="fusion-issues">
+    <h3>Fusion Issues</h3>
+    {% for node in candidates %}
+    <div>
+        <h4>{{node.op_pass|safe}}</h4>
+        <table>
+            <thead>
+                <tr>
+                    <th>Structure</th>
+                    <th>Counts</th>
+                    <th>Elapsed Time(us)</th>
+                </tr>
+            </thead>
+            <tbody>
+                <tr>
+                    <td>{{ node.fusion_pattern|safe }}</td>
+                    <td>{{ node.counts|safe }}</td>
+                    <td>{{ node.total_duration|safe }}</td>
+                </tr>
+            </tbody>
+        </table>
+        {% for match in node.matches %}
+        <h5>SubGraph {{ loop.index|safe }}</h5>
+        <table>
+            <thead>
+                <tr>
+                    <th>OP Name</th>
+                    <th>OP Type</th>
+                    <th>Elapsed Time(us)</th>
+                </tr>
+            </thead>
+            <tbody>
+                {% for node in match %}
+                <tr>
+                    <td>{{ node.op_name|safe }}</td>
+                    <td>{{ node.dtype|safe }}</td>
+                    <td>{{ node.duration|safe }}</td>
+                </tr>
+                {% endfor %}
+            </tbody>
+        </table>
+        {% endfor %}
+    </div>
+    {% endfor %}
+</div>
+{% endif %} diff --git a/profiler/advisor/interface/interface.py b/profiler/advisor/interface/interface.py index ebe20baa2d..c9f0f150fc 100644 --- a/profiler/advisor/interface/interface.py +++ b/profiler/advisor/interface/interface.py @@ -1,10 +1,11 @@ -from collections import OrderedDict import os +from collections import OrderedDict +from profiler.advisor.utils.utils import Timer from profiler.advisor.analyzer.computation.profiling_analyzer import ProfilingAnalyzer from profiler.advisor.analyzer.schedule.fusion_ops.fusion_ops_analyzer import TimelineFusionOpsAnalyzer +from profiler.advisor.analyzer.graph_fusion.graph_fusion_analyzer import FusionOPAnalyzer from profiler.advisor.common.analyzer_scopes import SupportedScopes -from profiler.advisor.utils.utils import Timer from profiler.advisor.analyzer.cluster.slow_rank_analyser import SlowRankAnalyzer from profiler.advisor.analyzer.cluster.slow_link_analyser import SlowLinkAnalyzer @@ -15,7 +16,8 @@ class Interface: SupportedScopes.TIMELINE_FUSION_OPS: TimelineFusionOpsAnalyzer }), "computation": OrderedDict({ - SupportedScopes.PORFILING_OPERATOR_ANALYSIS: ProfilingAnalyzer + SupportedScopes.PORFILING_OPERATOR_ANALYSIS: ProfilingAnalyzer, + SupportedScopes.GRAPH: FusionOPAnalyzer }), "communication": OrderedDict(), "overall": OrderedDict(), diff --git a/profiler/advisor/rules/op_fusion_pass.yaml b/profiler/advisor/rules/op_fusion_pass.yaml new file mode 100644 index 0000000000..3ff69a5782 --- /dev/null +++ b/profiler/advisor/rules/op_fusion_pass.yaml @@ -0,0 +1,491 @@ +Elementwise: &Elementwise [ Relu, Pow, Add, Sub, Mul, Div, Abs, Ceil, Log, Sqrt, Exp, LeakyRelu ] + +GraphFusion: + - FlashAttentionFusionPass: + version: 1 + nodes: + - node_1: [ BatchMatMulV2, BatchMatMul, MatMul, MatMulV2 ] + - node_2: [ Mul ] + - node_3: [ Softmax, SoftmaxV2 ] + - node_4: [ BatchMatMulV2, BatchMatMul, MatMul, MatMulV2 ] + + edges: + - [ node_1, node_2 ] + - [ node_2, node_3 ] + - [ node_3, node_4 ] + + - FlashAttentionFusionPass_V2: + version: 1 + nodes: + - node_1: [ BatchMatMulV2, BatchMatMul, MatMul, MatMulV2 ] + - node_2: [ Mul ] + - node_3: [ TransData ] + - node_4: [ Softmax, SoftmaxV2 ] + - node_5: [ BatchMatMulV2, BatchMatMul, MatMul, MatMulV2 ] + + edges: + - [ node_1, node_2 ] + - [ node_2, node_3 ] + - [ node_3, node_4 ] + - [ node_4, node_5 ] + + - BMMStridedSliceDGeluFusionPass: + version: 1 + nodes: + - node_1: [ BatchMatMulV2, BatchMatMul, MatMul, MatMulV2 ] + - node_2: [StridedSliceD] + - node_3: [Relu] + edges: + - [ node_1, node_2 ] + - [ node_2, node_3 ] + + - BMMConfusionTransposeDFusionPass: + version: 1 + nodes: + - node_1: [ BatchMatMulV2, BatchMatMul, MatMul, MatMulV2 ] + - node_2: [ ConfusionTransposeD ] + - node_3: [ Relu ] + edges: + - [ node_1, node_2 ] + - [ node_2, node_3 ] + + - BMMConfusionTransposeDFusionPass_V2: + version: 1 + nodes: + - node_1: [ BatchMatMulV2, BatchMatMul, MatMul, MatMulV2 ] + - node_2: [ ConfusionTransposeD ] + edges: + - [ node_1, node_2 ] + + - Conv2DAddGroupNormFusionPass: + version: 0 + struct: [ Conv2D, Add, GroupNorm ] + + - RMSnormAddFusionPass: + version: 0 + struct: [ RMSnorm, Add ] + + - ConvToFullyConnectionFusionPass: + version: 0 + struct: [ Conv ] + + - ZConcatv2dFusionPass: + version: 0 + struct: [ ConcatV2d, ConcatV2d ] + + - BatchMatMulReduceMeanFusionPass: + version: 1 + nodes: + - node_1: [ BatchMatMulV2, BatchMatMul, MatMul, MatMulV2 ] + - node_2: [ Add ] + - node_3: [ Relu ] + - node_4: [ ReduceMean ] + edges: + - [ node_1, node_2 ] + - [ node_2, node_3 ] + - [ node_3, node_4 ] 
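+  # Rule schema, as consumed by QueryGraphParser.parse_yaml (comments only, not a rule):
+  #   version 0 - "struct" lists op types that form a linear chain, e.g.
+  #       - ExampleLinearChainPass:      # illustrative name, not an actual fusion pass
+  #           version: 0
+  #           struct: [ Conv2D, Relu ]
+  #   version 1 - "nodes" name each op (a list value means any of the listed op types)
+  #       and "edges" connect the named nodes explicitly, as in the entries above.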
+ + - PadDepthwiseConv2dFusionPass: + version: 0 + struct: [ PadD, DepthwiseConv2D ] + + - ConvBatchnormFusionPass: + version: 1 + nodes: + - node_1: [ Conv2d, Conv3d, DepthwiseConv2d ] + - node_2: [ Batchnorm ] + + edges: + - [ node_1, node_2 ] + + - AConv2dMulFusion: + version: 1 + nodes: + - node_1: [ Conv2d, Conv3d ] + - node_2: [ Mul ] + + edges: + - [ node_1, node_2 ] + + - TBEConvAddFusion: + version: 1 + nodes: + - node_1: [ Conv2d, Conv3d ] + - node_2: [ Add ] + + edges: + - [ node_1, node_2 ] + + - ZBNupdateReluV2Conv2DBNreducePass: + version: 0 + struct: [ BNTrainingUpdate, ReluV2, Conv2D, BNTrainingReduce ] + + - ASplitConv2dConcatPass: + version: 1 + nodes: + - node_1: [ MatMul, MatMulV2, BatchMatMul, BatchMatMulV2 ] + - node_2: [ Cast ] + + edges: + - [ node_1, node_2 ] + + - MatMulBiasAddFusionPass: + version: 1 + nodes: + - node_1: [ MatMul, MatMulV2, BatchMatMul, BatchMatMulV2 ] + - node_2: [ BiasAdd, Add ] + + edges: + - [ node_1, node_2 ] + + - Conv2DbpInputBiasAddFusionPass: + version: 0 + struct: [ Conv2DBackpropInput, BiasAdd ] + + - BatchMatmulV2ReduceFusionPass: + version: 0 + struct: [ BatchMatMulV2, ReduceSumD ] + + - BatchMatmulV2ReduceFusionPass_V2: + version: 0 + struct: [ BatchMatMulV2, Cast, ReduceSumD ] + + - Conv3DbpInputBiasAddFusionPass: + version: 0 + struct: [ Conv3DBackpropInputD, BiasAdd ] + + - AFullyConnectionReshapePass: + version: 0 + struct: [ FullyConnection, Reshape ] + + - GemmTransFusionPass: + version: 0 + struct: [ Transpose, Gemm ] + + - Resnet50DbnDwFusionPass: + version: 0 + struct: [ BNTrainingReduceGrad, Conv2DBackpropFilterD ] + + - CastReluCastFusionPass: + version: 0 + struct: [ Cast, Relu, Cast ] + + - PadConv2dFusionPass: + version: 1 + nodes: + - node_1: [ PadD, PadDV3 ] + - node_2: [ Conv2D ] + + edges: + - [ node_1, node_2 ] + + - Conv2DTransposeBatchnormFusionPass: + version: 1 + nodes: + - node_1: [ Conv2dTranspose ] + - node_2: [ BatchNorm, BNInference ] + + edges: + - [ node_1, node_2 ] + + - AvgPoolV2GradFusionPass: + version: 0 + struct: [ AvgPooV2lGrad ] + + - DropOutDoMaskFusionPass: + version: 0 + struct: [ DropOutDoMaskV3D ] + + - ConvCastFusionPass: + version: 0 + struct: [ Conv2D, Cast ] + + - ConvCastFusionPass_V2: + version: 0 + struct: [ Conv2D, TransData, Cast ] + + - StridedSliceConcatFusionPass: + version: 1 + nodes: + - node_1: [ StridedSliceD ] + - node_2: [ StridedSliceD ] + - node_3: [ ConcatD ] + + edges: + - [ node_1, node_3 ] + - [ node_2, node_3 ] + + - ConvCastFusionPass: + version: 0 + struct: [ SplitV ] + + - AInplaceAddFusionPass: + version: 0 + struct: [ InplaceAdd ] + + - AInplaceSubFusionPass: + version: 0 + struct: [ InplaceSub ] + + - AInplaceUpdateFusionPass: + version: 0 + struct: [ InplaceUpdate ] + +UBFusion: + - TbeConv3dElemwisePass: + version: 1 + nodes: + - node_1: [ Conv3D ] + - node_2: *Elementwise + edges: + - [ node_1, node_2 ] + + - TbeConv3dDxElemwisePass: + version: 0 + struct: [ Conv3dBackpropInput, AddN, LeakyReluGrad ] + + - TbeConv3dDxElemwisePass_V2: + version: 0 + struct: [ Conv3dBackpropInput, LeakyReluGrad ] + + - MatMulDropoutDoMaskV3dFusionPass: + version: 0 + struct: [ MatMul, Dropout_do_mask_v3_d, Add ] + + - BatchMatMulDropoutDoMaskV3dFusionPass: + version: 0 + struct: [ BatchMatMul, Dropout_do_mask_v3_d, Add ] + + - MatmulReduceSumUbFusion: + version: 0 + struct: [ BatchMatMul, ReduceSum ] + + - TbeBatchMatMulElementWiseFusionPass: + version: 1 + nodes: + - node_1: [ BatchMatMul, GEMM ] + - node_2: *Elementwise + + edges: + - [ node_1, node_2 ] + + - 
ATbeMatMulElemwiseFusionPass: + version: 1 + nodes: + - node_1: [ MatMul, GEMM ] + - node_2: *Elementwise + + edges: + - [ node_1, node_2 ] + + - MatmulConfusiontransposeUbFusion: + version: 0 + struct: [ MatMul, matmul_transpose ] + + - TbeFullyconnectionElemwiseDequantFusionPass: + version: 1 + nodes: + - node_1: [ BatchMatMul, MatMul, FullyConnection ] + - node_2: *Elementwise + + edges: + - [ node_1, node_2 ] + + - BatchMatmulConfusiontransposeUbFusion: + version: 0 + struct: [ BatchMatMul, batchmatmul_transpose ] + + - TbeConvSigmoidMulQuantFusionPass: + version: 1 + nodes: + - node_1: [ Conv ] + - node_2: [ Sigmoid ] + - node_3: [ Mul ] + - node_4: [ Quant ] + + edges: + - [ node_1, node_2 ] + - [ node_1, node_3 ] + - [ node_2, node_3 ] + - [ node_3, node_4 ] + + - TbeConv2DReluv2Pass: + version: 0 + struct: [ Conv2D, ReluV2 ] + + - TbeConvDoubleInFusionPass: + version: 1 + nodes: + - node_1: [ Conv2D ] + - node_2: *Elementwise + - node_3: *Elementwise + edges: + - [ node_1, node_2 ] + - [ node_2, node_3 ] + + - TbeConv2dAddClipMulDivFusionPass: + version: 0 + struct: [ Conv2D, Add, Clip, Mul, Div ] + + - TbeConv2dAddClipMulDivFusionPass_V2: + version: 0 + struct: [ Conv2D, Add, Clip, Mul ] + + - TbeConv2dAddRelu6MulMulFusionPass: + version: 1 + nodes: + - node_1: [ Conv2D, DepthwiseConv2D ] + - node_2: [ Add ] + - node_3: [ Relu6 ] + - node_4: [ Mul ] + - node_5: [ Mul ] + + edges: + - [ node_1, node_2 ] + - [ node_2, node_3 ] + - [ node_3, node_4 ] + - [ node_4, node_5 ] + + - ConvClipByValueFusionPass: + version: 1 + nodes: + - node_1: [ Conv2D ] + - node_2: *Elementwise + edges: + - [ node_1, node_2 ] + + - TbeAippConvReluMaxpoolingFusion: + version: 1 + nodes: + - node_1: [ Conv2D ] + - node_2: *Elementwise + - node_3: [ MaxPool, MaxPoolv3 ] + + edges: + - [ node_1, node_2 ] + - [ node_2, node_3 ] + + - TbeReduceElemwiseFusionPass: + version: 1 + nodes: + - node_1: *Elementwise + - node_2: [ CommReduce ] + edges: + - [ node_1, node_2 ] + + - TbeReadSelectEltwiseFusionPass: + version: 1 + nodes: + - node_1: [ ReadSelect ] + - node_2: *Elementwise + + edges: + - [ node_1, node_2 ] + + - TbeEltwiseWriteSelectFusionPass: + version: 1 + nodes: + - node_1: *Elementwise + - node_2: [ write_select ] + + edges: + - [ node_1, node_2 ] + + - TbeEltwiseFusionPass: + version: 1 + nodes: + - node_1: *Elementwise + - node_2: *Elementwise + + edges: + - [ node_1, node_2 ] + + - TbeConvBnreduceFusionPass: + version: 0 + struct: [ Convolution, bn_reduce ] + + - TbeBnupdateEltwiseFusionPass: + version: 1 + nodes: + - node_1: [ bn_update ] + - node_2: *Elementwise + edges: + - [ node_1, node_2 ] + + - TbeConv2DBackpropElemwiseFusionPass: + version: 1 + nodes: + - node_1: [ Conv2DBackpropInputD, Conv2DTransposeD, Deconvolution ] + - node_2: [ Add, ReluGradV2 ] + + edges: + - [ node_1, node_2 ] + + - TbeDxElemwisePass: + version: 1 + nodes: + - node_1: [ Conv2DBackpropInputD, Conv2DTransposeD, Deconvolution ] + - node_2: [ LeakyRelu, Prelu ] + + edges: + - [ node_1, node_2 ] + + - TbeConv2dBackpropRequantFusionPass: + version: 1 + nodes: + - node_1: [ Conv2DBackpropInputD, Conv2DTransposeD, Deconvolution ] + - node_2: [ AscendRequant ] + + edges: + - [ node_1, node_2 ] + + - TbeDwTransdataFusionPass: + version: 1 + nodes: + - node_1: [ Transdate ] + - node_2: [ Transdate ] + - node_3: [ Conv2DBackpropFilter ] + + edges: + - [ node_1, node_3 ] + - [ node_2, node_3 ] + + - TbeDxTransdataFusionPass: + version: 1 + nodes: + - node_1: [ Transdate ] + - node_2: [ Transdate ] + - node_3: [ 
Conv2DBackpropInput ] + + edges: + - [ node_1, node_3 ] + - [ node_2, node_3 ] + + - TbeEltwiseCastFusionPass: + version: 1 + nodes: + - node_1: [ Relu, Add, Mul, Sqrt ] + - node_2: [ Cast ] + + edges: + - [ node_1, node_2 ] + + - TbeEltwiseCastFusionPass_V2: + version: 1 + nodes: + - node_1: [ Cast ] + - node_2: [ Relu, Add, Mul, Sqrt ] + + + edges: + - [ node_1, node_2 ] + + - TbeConv2DBackpropDequantFusionPass: + version: 1 + nodes: + - node_1: [ Conv2DBackpropInputD, Conv2DTransposeD, Deconvolution ] + - node_2: [ AscendDequant ] + + + edges: + - [ node_1, node_2 ] diff --git a/profiler/cli/__init__.py b/profiler/cli/__init__.py index 4666e2d531..2cba173de1 100644 --- a/profiler/cli/__init__.py +++ b/profiler/cli/__init__.py @@ -1,4 +1,4 @@ from profiler.advisor.config.config import Config from profiler.advisor.utils.utils import Timer -Config().set_log_path(f"ma_advisor_{Timer().strftime}.xlsx") +Config().set_log_path(f"att_advisor_{Timer().strftime}.xlsx") diff --git a/profiler/cli/entrance.py b/profiler/cli/entrance.py index 1164a78cd1..8bc5a364ac 100644 --- a/profiler/cli/entrance.py +++ b/profiler/cli/entrance.py @@ -54,6 +54,6 @@ advisor_cli.add_command(compare_cli, name="compare") if __name__ == '__main__': advisor_cli.main( ["analyze", "all", "-d", - r"C:/xxx/profiling_data", + r"C:\Users\admin\Downloads\llama-13B\llama-13b", ] ) diff --git a/profiler/cluster_analyse/common_func/db_manager.py b/profiler/cluster_analyse/common_func/db_manager.py index 039473d707..47dd30f0db 100644 --- a/profiler/cluster_analyse/common_func/db_manager.py +++ b/profiler/cluster_analyse/common_func/db_manager.py @@ -15,6 +15,9 @@ import os import sqlite3 +import sys + +sys.path.append("../../") from common_func.constant import Constant from common_func.empty_class import EmptyClass diff --git a/profiler/test/tools/tool.py b/profiler/test/tools/tool.py index ee4b6f9bb1..18699951fa 100644 --- a/profiler/test/tools/tool.py +++ b/profiler/test/tools/tool.py @@ -20,7 +20,7 @@ def recover_env(work_path="./"): if os.path.exists("./tune_ops_file.cfg"): os.remove("./tune_ops_file.cfg") - delete_file(r"ma_advisor_+", work_path) + delete_file(r"att_advisor_+", work_path) def run_command(cmd): -- Gitee From faa848f0f6be5980cb895935488b6a6a566032cc Mon Sep 17 00:00:00 2001 From: fanxiaotong Date: Fri, 17 May 2024 17:43:27 +0800 Subject: [PATCH 14/21] advisor --- .../computation/npu_fused/__init__.py | 14 -- .../npu_fused/compute_advice_base.py | 118 ---------- .../computation/npu_fused/csv_analyzer.py | 81 ------- .../computation/npu_fused/json_analyzer.py | 55 ----- .../computation/npu_fused/npu_fused_advice.py | 113 ---------- .../computation/npu_fused/npu_slow_advice.py | 124 ---------- .../analyzer/computation/npu_fused/op_perf.py | 193 ---------------- profiler/advisor/common/constant.py | 211 ------------------ profiler/advisor/common/trace_view_json.py | 209 ----------------- .../advisor/common/trace_view_preprocessor.py | 208 ----------------- profiler/advisor/interface/interface.py | 1 - 11 files changed, 1327 deletions(-) delete mode 100644 profiler/advisor/analyzer/computation/npu_fused/__init__.py delete mode 100644 profiler/advisor/analyzer/computation/npu_fused/compute_advice_base.py delete mode 100644 profiler/advisor/analyzer/computation/npu_fused/csv_analyzer.py delete mode 100644 profiler/advisor/analyzer/computation/npu_fused/json_analyzer.py delete mode 100644 profiler/advisor/analyzer/computation/npu_fused/npu_fused_advice.py delete mode 100644 
profiler/advisor/analyzer/computation/npu_fused/npu_slow_advice.py delete mode 100644 profiler/advisor/analyzer/computation/npu_fused/op_perf.py delete mode 100644 profiler/advisor/common/trace_view_json.py delete mode 100644 profiler/advisor/common/trace_view_preprocessor.py diff --git a/profiler/advisor/analyzer/computation/npu_fused/__init__.py b/profiler/advisor/analyzer/computation/npu_fused/__init__.py deleted file mode 100644 index 8400fd5ecd..0000000000 --- a/profiler/advisor/analyzer/computation/npu_fused/__init__.py +++ /dev/null @@ -1,14 +0,0 @@ -# Copyright (c) 2023, Huawei Technologies Co., Ltd. -# All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. diff --git a/profiler/advisor/analyzer/computation/npu_fused/compute_advice_base.py b/profiler/advisor/analyzer/computation/npu_fused/compute_advice_base.py deleted file mode 100644 index 3916de201b..0000000000 --- a/profiler/advisor/analyzer/computation/npu_fused/compute_advice_base.py +++ /dev/null @@ -1,118 +0,0 @@ -# Copyright (c) 2023, Huawei Technologies Co., Ltd. -# All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- -from abc import abstractmethod -from collections import defaultdict -import os - -from profiler.cluster_analyse.common_func.file_manager import FileManager -from profiler.advisor.analyzer.base_analyzer import BaseAnalyzer -from profiler.advisor.common import constant as const - - -class ComputeAdviceBase(BaseAnalyzer): - ASCEND_PT = 'ascend_pt' - ASCEND_PROFILER_OUTPUT = 'ASCEND_PROFILER_OUTPUT' - KERNEL_DETAIL_FILE = "kernel_details.csv" - TRACE_VIEW_FILE = "trace_view.json" - - def __init__(self, collection_path: str, n_processes: int = 1, cann_version=const.DEFAULT_CANN_VERSION, - torch_version=const.DEFAULT_TORCH_VERSION, **kwargs): - super().__init__(collection_path, n_processes, cann_version, torch_version, **kwargs) - self.kernel_details_path = "" - self.has_preparse = False - self.preparse_data = defaultdict(list) - self.call_stack = None - self.trace_view_path = "" - - def path_check(self): - """ - check whether input path is valid - """ - if not os.path.exists(self.collection_path): - print("[ERROR] Path: {} is not exist.".format(self.collection_path)) - return False - if os.path.isdir(self.collection_path) and self.collection_path.endswith("ascend_pt"): - self.kernel_details_path = os.path.join(self.collection_path, "ASCEND_PROFILER_OUTPUT", - "kernel_details.csv") - if not os.path.exists(self.kernel_details_path): - print("[ERROR] kernel_details.csv is not exist in the Path: {}.".format( - os.path.join(self.collection_path, "ASCEND_PROFILER_OUTPUT"))) - return False - elif os.path.isfile(self.collection_path) and os.path.basename(self.collection_path) == "kernel_details.csv": - self.kernel_details_path = self.collection_path - else: - print("[ERROR] Please input ascend_pt or kernel_details.csv") - return False - print("[INFO] Start to analyse the target file: {}".format(self.kernel_details_path)) - self.preparse() - return True - - def has_callstack(self): - if self.call_stack is not None: - return self.call_stack - profiler_info_json_path = "" - for file in os.listdir(self.collection_path): - if file.startswith("profiler_info"): - profiler_info_json_path = os.path.join(self.collection_path, file) - break - if not profiler_info_json_path: - self.call_stack = False - return self.call_stack - self.trace_view_path = os.path.join(self.collection_path, self.ASCEND_PROFILER_OUTPUT, "trace_view.json") - if not os.path.exists(profiler_info_json_path) or not os.path.exists(self.trace_view_path): - self.call_stack = False - return self.call_stack - info = FileManager.read_json_file(profiler_info_json_path) - if not info.get("config") or not info.get("config").get("common_config") \ - or not info.get("config").get("common_config").get("with_stack"): - self.call_stack = False - return self.call_stack - activities = info.get("config").get("common_config").get("activities") - if not activities or "ProfilerActivity.CPU" not in activities: - self.call_stack = False - return self.call_stack - self.call_stack = info.get("config").get("common_config").get("with_stack") - return self.call_stack - - @abstractmethod - def run(self): - """ - analyze profiling data and advice - """ - - def output(self): - """ - output relevant data - """ - self.output_format_data[self.DATA] = self.cur_data - self.output_format_data[self.BOTTLENECK] = self.cur_bottleneck - self.output_format_data[self.ADVICE] = self.cur_advice - - def preparse(self): - if self.has_preparse: - return - - def optimize(self): - pass - - def make_record(self): - """ - make record for what and how to optimize - """ - pass - - def 
make_render(self): - pass diff --git a/profiler/advisor/analyzer/computation/npu_fused/csv_analyzer.py b/profiler/advisor/analyzer/computation/npu_fused/csv_analyzer.py deleted file mode 100644 index c85c14d618..0000000000 --- a/profiler/advisor/analyzer/computation/npu_fused/csv_analyzer.py +++ /dev/null @@ -1,81 +0,0 @@ -# Copyright (c) 2023, Huawei Technologies Co., Ltd. -# All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -import multiprocessing - -import pandas as pd -import numpy as np - -from common_func_advisor.constant import Constant -from .op_perf import OpPerfFactory - - -class CSVAnalyzer: - def __init__(self, path) -> None: - self._path = path - - def process(self): - df = pd.read_csv(self._path, dtype={"Start Time(us)": str}) - # 分析是否存在可融合的算子 - op_type_list = df["Type"].tolist() - duration_list = df["Duration(us)"].tolist() - start_times = df["Start Time(us)"].tolist() - # 去除末尾的\t分隔符 - start_times = [start_time[:-1] for start_time in start_times] - result_list = [] - for pattern in Constant.PATTERN_DICT.keys(): - result_list.extend(self.find_all_sub_lists(op_type_list, duration_list, start_times, pattern)) - data_frame = pd.DataFrame(result_list) - data_frame.columns = ["pattern_name", "pattern", "len", "count", "duration sum(us)", "op durations(us)", - "index", "first_timestamp"] - return data_frame - - @staticmethod - def find_all_sub_lists(op_type_list, duration_list, start_times, expect_sub_list): - # 创建一个空字典,用来存储子列表和它们的出现次数和起始位置 - len_sub_list = len(expect_sub_list) - expect_sub_list = tuple(expect_sub_list) - sublist_dict = {} - # 遍历列表,从每个位置开始,取长度为N的子列表 - for i in range(len(op_type_list) - len_sub_list + 1): - sublist = tuple(op_type_list[i:i + len_sub_list]) - if sublist != expect_sub_list: - continue - # 如果子列表已经在字典中,就增加它的出现次数,否则就初始化为1 - if sublist in sublist_dict: - # count - sublist_dict[sublist][0] += 1 - # index - sublist_dict[sublist][1].append(i) - # total duration - sublist_dict[sublist][2] += sum(duration_list[i:i + len_sub_list]) - # duration - zip_data = zip(sublist_dict[sublist][3], duration_list[i:i + len_sub_list]) - sublist_dict[sublist][3] = [a + b for a, b in zip_data] - else: - sublist_dict[sublist] = [1, [i], sum(duration_list[i:i + len_sub_list]), - duration_list[i:i + len_sub_list], len_sub_list, start_times[i]] - # 创建一个空列表,用来存储所有重复的子列表 - repeated_sublists = [] - for sublist, (count, index, duration_sum, op_durations, sublist_len, first_time) in sublist_dict.items(): - pattern_name = Constant.PATTERN_DICT.get(sublist, "unknown") - op_durations = [round(num, 2) for num in op_durations] - repeated_sublists.append([pattern_name, sublist, sublist_len, count, - duration_sum, op_durations, index, first_time]) - if len(sublist_dict) == 0: - pattern_name = Constant.PATTERN_DICT.get(expect_sub_list, "unknown") - repeated_sublists.append([pattern_name, expect_sub_list, 0, 0, 0, 0, 0, 0]) - # 返回所有重复的子列表 - return repeated_sublists diff --git a/profiler/advisor/analyzer/computation/npu_fused/json_analyzer.py 
b/profiler/advisor/analyzer/computation/npu_fused/json_analyzer.py deleted file mode 100644 index fd2a72ffa3..0000000000 --- a/profiler/advisor/analyzer/computation/npu_fused/json_analyzer.py +++ /dev/null @@ -1,55 +0,0 @@ -# Copyright (c) 2024, Huawei Technologies Co., Ltd. -# All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -import pandas as pd - -from common_func_advisor.trace_view_json import TraceViewJson - - -class JSONAnalyzer(object): - def __init__(self, path): - self._path = path - - def get_custom_code(self, data: pd.DataFrame, ts_col: str, output_col: str): - trace_json = TraceViewJson(self._path) - callstacks = pd.DataFrame(columns=[output_col]) - - for i, row in data.iterrows(): - if ts_col not in data.columns.tolist(): - print("[ERROR] No {} col found in data columns.".format(ts_col)) - return callstacks - timestamp = row[ts_col] - flow_event = trace_json.get_torch_2_npu_flow_event(timestamp) - if not flow_event.valid(): - print("[ERROR] Get flow event failed for pattern {}.".format(row['pattern'])) - callstacks.loc[i] = "" - continue - flow_event_s_key = flow_event.s_point_ts - python_dur_events = trace_json.get_python_dur_events_contain_ts(flow_event_s_key) - if not python_dur_events: - print("[ERROR] No python dur event found for pattern {}.".format(row['pattern'])) - callstacks.loc[i] = "" - continue - # 保持新老版本callstack兼容性 - if python_dur_events[0].args.get("Call stack"): - # 旧版本 - callstack = python_dur_events[0].args.get("Call stack").split(";") - else: - python_dur_events.sort(key=lambda e: e.ts) - # 新版本 - callstack = [event.name for event in python_dur_events if event.cat == "python_function"] - callstack_str = "\n".join(callstack) - callstacks.loc[i] = callstack_str - return callstacks diff --git a/profiler/advisor/analyzer/computation/npu_fused/npu_fused_advice.py b/profiler/advisor/analyzer/computation/npu_fused/npu_fused_advice.py deleted file mode 100644 index 7ec711d800..0000000000 --- a/profiler/advisor/analyzer/computation/npu_fused/npu_fused_advice.py +++ /dev/null @@ -1,113 +0,0 @@ -# Copyright (c) 2023, Huawei Technologies Co., Ltd. -# All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- -import os -from abc import ABC - -import pandas as pd - -from profiler.advisor.analyzer.computation.npu_fused.compute_advice_base import ComputeAdviceBase -from profiler.advisor.analyzer.computation.npu_fused.csv_analyzer import CSVAnalyzer -from profiler.advisor.analyzer.computation.npu_fused.json_analyzer import JSONAnalyzer -from profiler.advisor.common import constant as const -from profiler.advisor.result.item import OptimizeItem, OptimizeRecord - - -class NpuFusedAdvice(ComputeAdviceBase, ABC, ): - NPU_FUSED_ADVICE = "npu_fused_advice" - - def __init__(self, collection_path: str, n_processes: int = 1, cann_version=const.DEFAULT_CANN_VERSION, - torch_version=const.DEFAULT_TORCH_VERSION, **kwargs): - super().__init__(collection_path, n_processes, cann_version, torch_version, **kwargs) - self.cur_data = dict() - self.cur_bottleneck = str() - self.cur_advice = str() - self.kernel_details_path = "" - self.call_stack = None - - def run(self): - if not self.path_check(): - return self.output_format_data - self.process() - self.output() - return self.output_format_data - - def process(self): - csv_analyzer = CSVAnalyzer(self.kernel_details_path) - all_pattern_data = csv_analyzer.process() - all_pattern_data = all_pattern_data.sort_values(by='duration sum(us)', ascending=False) - filter_data = all_pattern_data.get(all_pattern_data.get("duration sum(us)", 0) > 0) - if not self.has_callstack(): - print("[Warning] No call stack info found, advice will be incomplete") - self.cur_data = filter_data - else: - json_analyzer = JSONAnalyzer(self.trace_view_path) - custom_code = json_analyzer.get_custom_code(filter_data, "first_timestamp", "custom code") - self.cur_data = pd.concat([filter_data, custom_code], axis=1) - op_num = len(self.cur_data.index) - op_dur = filter_data["duration sum(us)"].sum() - if op_num > 0: - index = 0 - self.cur_bottleneck = f"The computing time of fusable op is {round(op_dur, 2)} ms." - self.cur_advice = "" - for _, row in self.cur_data.iterrows(): - advice = f"Advice {index}:\n" - cur_op = "[" + ", ".join(row.loc["pattern"]) + "]" - npu_fused_op = row.loc["pattern_name"] - advice += f"Replace {cur_op} with {npu_fused_op}. 
" - if self.call_stack: - advice += f"This pattern first happened in: \n{row['custom code']}" - if index != op_num - 1: - advice += "\n" - index += 1 - self.cur_advice += advice - - def optimize(self): - if not self.path_check(): - return self.output_format_data - self.process() - self.output() - return self.output_format_data - - def make_record(self): - """ - make record for what and how to optimize - """ - optimization_item = OptimizeItem( - NpuFusedAdvice.NPU_FUSED_ADVICE, - self.bottleneck_str, - self.cur_advices - ) - self.result.add(OptimizeRecord(optimization_item)) - - # self.result.add_detail(const.BOTTLENECK, self.bottleneck_table["headers"], self.bottleneck_table["data"][0]) - # for data_type, data in self.cur_data.items(): - # if data: - # self.result.add_detail(const.DATA + data_type, self.cur_data_table[data_type]["headers"], self.cur_data_table[data_type]["data"][0]) - - def make_render(self): - result_for_html = { - "Description" : self.cur_bottleneck, - "suggestion" : self.cur_advice, - "details" : [{}] - } - - self.html_render.render_template(key="cluster", - title=NpuFusedAdvice.NPU_FUSED_ADVICE, - template_dir="templates", - template_name="cluster_analysis.html", - cann_version=self.cann_version, - torch_version=self.torch_version, - result=result_for_html) diff --git a/profiler/advisor/analyzer/computation/npu_fused/npu_slow_advice.py b/profiler/advisor/analyzer/computation/npu_fused/npu_slow_advice.py deleted file mode 100644 index 0dd0a3225f..0000000000 --- a/profiler/advisor/analyzer/computation/npu_fused/npu_slow_advice.py +++ /dev/null @@ -1,124 +0,0 @@ -# Copyright (c) 2023, Huawei Technologies Co., Ltd. -# All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-from abc import ABC -import multiprocessing - -import pandas as pd - -from profiler.advisor.analyzer.computation.npu_fused.compute_advice_base import ComputeAdviceBase -from profiler.advisor.analyzer.computation.npu_fused.op_perf import OpPerfFactory -from profiler.advisor.common import constant as const -from profiler.advisor.common.constant import PerfColor -from profiler.advisor.common.trace_view_json import TraceViewJson -from profiler.advisor.common.constant import Constant -from profiler.advisor.display.html.render import HTMLRender -from profiler.advisor.result.result import OptimizeResult -from profiler.advisor.result.item import OptimizeItem, OptimizeRecord - - -class NpuSlowAnalyzer(ComputeAdviceBase, ABC): - OP_PERF_SHEET = "op_perf" - npu_slow_advice = "NPU_SLOW_ADVICE" - - def __init__(self, collection_path: str, n_processes: int = 1, cann_version=const.DEFAULT_CANN_VERSION, - torch_version=const.DEFAULT_TORCH_VERSION, **kwargs): - super().__init__(collection_path, n_processes, cann_version, torch_version, **kwargs) - self.kernel_details_path = "" - self.data = pd.DataFrame() - - @staticmethod - def save_to_excel(data: pd.DataFrame, file_path: str) -> None: - writer = pd.ExcelWriter(file_path, engine="xlsxwriter", mode="w") - data.index.name = Constant.TITLE.INDEX - data.to_excel(writer, index=True, sheet_name=NpuSlowAnalyzer.OP_PERF_SHEET) - NpuSlowAnalyzer.color_sheet(data, writer.book, writer.sheets[NpuSlowAnalyzer.OP_PERF_SHEET]) - writer.sheets[NpuSlowAnalyzer.OP_PERF_SHEET].freeze_panes = "A2" - writer.close() - - @staticmethod - def color_sheet(data: pd.DataFrame, workbook, worksheet): - color_rgb = { - PerfColor.GREEN.name: workbook.add_format({'bg_color': '#C6EFCE'}), - PerfColor.YELLOW.name: workbook.add_format({'bg_color': '#FFEB9C'}), - PerfColor.RED.name: workbook.add_format({'bg_color': '#FFC7CE'}), - } - for row in data.iterrows(): - color = row[1][Constant.TITLE.COLOR] - fill_format = color_rgb.get(color) - if not fill_format: - continue - worksheet.set_row(row[0] + 1, None, fill_format) - - @staticmethod - def update_op_row(row: tuple): - return OpPerfFactory.build(row[1]).update() - - def get_call_stack(self, data: pd.DataFrame, index_id: int, ts_col: str) -> str: - if not self.has_callstack(): - print("There is no call stack info, please set 'with_stack=True'") - return "" - trace_json = TraceViewJson(self.trace_view_path) - return trace_json.get_call_stack(data, index_id, ts_col) - - def run(self): - if not self.path_check(): - return self.data - self.process() - return self.data - - def process(self): - self.data = pd.read_csv(self.kernel_details_path, dtype={"Start Time(us)": str}) - # 去除末尾的\t分隔符 - self.data["Start Time(us)"] = self.data["Start Time(us)"].apply(lambda x: x[:-1]) - pool = multiprocessing.Pool(multiprocessing.cpu_count()) - result = pool.map(self.update_op_row, self.data.iterrows()) - pool.close() - self.data = pd.DataFrame(result) - - def optimize(self): - if not self.path_check(): - return self.data - self.process() - - def make_record(self): - """ - make record for what and how to optimize - """ - optimization_item = OptimizeItem( - NpuSlowAnalyzer.npu_slow_advice, - self.bottleneck_str, - self.cur_advices - ) - self.result.add(OptimizeRecord(optimization_item)) - - # self.result.add_detail(const.BOTTLENECK, self.bottleneck_table["headers"], self.bottleneck_table["data"][0]) - # for data_type, data in self.cur_data.items(): - # if data: - # self.result.add_detail(const.DATA + data_type, self.cur_data_table[data_type]["headers"], 
self.cur_data_table[data_type]["data"][0]) - - def make_render(self): - result_for_html = { - "Description" : self.cur_bottleneck, - "suggestion" : self.cur_advice, - "details" : [{}] - } - - self.html_render.render_template(key="cluster", - title=NpuSlowAnalyzer.npu_slow_advice, - template_dir="templates", - template_name="cluster_analysis.html", - cann_version=self.cann_version, - torch_version=self.torch_version, - result=result_for_html) diff --git a/profiler/advisor/analyzer/computation/npu_fused/op_perf.py b/profiler/advisor/analyzer/computation/npu_fused/op_perf.py deleted file mode 100644 index dbcaad8c1c..0000000000 --- a/profiler/advisor/analyzer/computation/npu_fused/op_perf.py +++ /dev/null @@ -1,193 +0,0 @@ -# Copyright (c) 2023, Huawei Technologies Co., Ltd. -# All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -import functools -from typing import Dict -from profiler.advisor.common.constant import Constant, CoreType, PerfColor - - -class OpPerfFactory: - @classmethod - def build(cls, op_row: Dict): - if op_row.get(Constant.TITLE.TASK_TYPE) == CoreType.AIV: - return VecOpPerf(op_row) - elif op_row.get(Constant.TITLE.TASK_TYPE) == CoreType.AIC: - return CubeOpPerf(op_row) - else: - return OpPerf(op_row) - - -class OpPerf: - def __init__(self, op_row: Dict): - if "OP Type" in op_row.keys(): - Constant.update_title() - self.row = op_row - self.model_name = op_row.get("Model Name") - self.model_id = op_row.get("Model ID") - self.task_id = op_row.get("Task ID") - self.stream_id = op_row.get("Stream ID") - self.infer_id = op_row.get("Infer ID") - self.op_name = op_row.get("Name") - self.op_type = op_row.get("Type") - self.task_type = op_row.get("Accelerator Core") - self.task_start_time = op_row.get("Start Time(us)") - self.task_duration = op_row.get("Duration(us)") - self.task_wait_time = op_row.get("Wait Time(us)") - self.block_dim = op_row.get("Block Dim") - self.mix_block_dim = op_row.get("Mix Block Dim") - - self.hf32_eligible = op_row.get("HF32 Eligible") - self.input_shapes = op_row.get("Input Shapes") - self.input_data_types = op_row.get("Input Data Types") - self.input_formats = op_row.get("Input Formats") - self.output_shapes = op_row.get("Output Shapes") - self.output_data_types = op_row.get("Output Data Types") - self.output_formats = op_row.get("Output Formats") - self.context_id = op_row.get("Context ID") - self.aicore_time = op_row.get("aicore_time(us)") - self.aic_total_cycles = op_row.get("aic_total_cycles") - - self.aic_mac_time = op_row.get("aic_mac_time(us)") - self.aic_mac_ratio = op_row.get("aic_mac_ratio") - self.aic_scalar_time = op_row.get("aic_scalar_time(us)") - self.aic_scalar_ratio = op_row.get("aic_scalar_ratio") - self.aic_mte1_time = op_row.get("aic_mte1_time(us)") - self.aic_mte1_ratio = op_row.get("aic_mte1_ratio") - self.aic_mte2_time = op_row.get("aic_mte2_time(us)") - self.aic_mte2_ratio = op_row.get("aic_mte2_ratio") - self.aic_fixpipe_time = op_row.get("aic_fixpipe_time(us)") - self.aic_fixpipe_ratio = 
op_row.get("aic_fixpipe_ratio") - self.aic_icache_miss_rate = op_row.get("aic_icache_miss_rate") - self.aiv_time = op_row.get("aiv_time(us)") - self.aiv_total_cycles = op_row.get("aiv_total_cycles") - self.aiv_vec_time = op_row.get("aiv_vec_time(us)") - self.aiv_vec_ratio = op_row.get("aiv_vec_ratio") - self.aiv_scalar_time = op_row.get("aiv_scalar_time(us)") - self.aiv_scalar_ratio = op_row.get("aiv_scalar_ratio") - self.aiv_mte2_time = op_row.get("aiv_mte2_time(us)") - - self.aiv_mte2_ratio = op_row.get("aiv_mte2_ratio") - self.aiv_mte3_time = op_row.get("aiv_mte3_time(us)") - self.aiv_mte3_ratio = op_row.get("aiv_mte3_ratio") - self.aiv_icache_miss_rate = op_row.get("aiv_icache_miss_rate") - self.cube_utilization = op_row.get("cube_utilization( %)") - - @staticmethod - def get_dtype_size(dtype_str: str): - return Constant.DTYPE_SIZE_MAP.get(dtype_str.lower(), 0) - - @staticmethod - def get_element_count(shape: list): - return functools.reduce(lambda x, y: int(x) * int(y), shape) - - @staticmethod - def shape_to_tuple(shape_str: str) -> tuple: - if not isinstance(shape_str, str): - return [] - shape_str = shape_str.strip('"') - split_shape = shape_str.strip(';') - if not split_shape: - return [] - pairs = split_shape.split(';') - shape_result = [] - for pair in pairs: - pair = pair.strip(";") - elements = pair.split(',') - elements = tuple(int(element) if "" != element else 0 for element in elements) - shape_result.append(elements) - return tuple(shape_result) - - @staticmethod - def dtype_to_tuple(dtypes_str: str) -> tuple: - if not isinstance(dtypes_str, str): - return [] - dtypes_str = dtypes_str.strip('"') - split_dtypes = dtypes_str.strip(';') - if not split_dtypes: - return [] - pairs = split_dtypes.split(';') - return tuple(pairs) - - def get_mac_ratio(self): - return self.aic_mac_ratio - - def get_size(self, shapes_str, dtypes_str): - shapes = self.shape_to_tuple(shapes_str) - dtypes = self.dtype_to_tuple(dtypes_str) - if len(shapes) > len(dtypes): - print(f"[ERROR] The size of shape is greater than that of dtypes.") - return 0 - if len(shapes) < len(dtypes): - shapes = list(shapes) - shapes.extend([(1,)] * (len(dtypes) - len(shapes))) - all_size = 0 - for index, shape in enumerate(shapes): - element_count = self.get_element_count(shape) - dtype_size = self.get_dtype_size(dtypes[index]) - all_size += element_count * dtype_size - return all_size - - def get_calc_size(self): - # input and output bytes (MB) - if not self.input_shapes or not self.output_shapes: - print("[ERROR] There is no tensor data, do not assess vector op performance.") - return 0 - intput_size = self.get_size(self.input_shapes, self.input_data_types) - output_size = self.get_size(self.output_shapes, self.output_data_types) - return (intput_size + output_size) / (Constant.BYTE_UNIT_TRANS * Constant.BYTE_UNIT_TRANS) - - def get_throughput(self): - # throughput(GB/s) - if not self.task_duration or abs(self.task_duration) < 1e-6: - print("[ERROR] There is no task_duration, do not assess vector op performance.") - return 0 - return self.row[Constant.TITLE.SIZE] / Constant.BYTE_UNIT_TRANS / self.task_duration * Constant.UNIT_TRANS * Constant.UNIT_TRANS - - def get_perf_color(self): - return PerfColor.WHITE - - def update(self): - self.row[Constant.TITLE.SIZE] = self.get_calc_size() - self.row[Constant.TITLE.THROUGHPUT] = self.get_throughput() - self.row[Constant.TITLE.COLOR] = self.get_perf_color().name - return self.row - - -class VecOpPerf(OpPerf): - def get_perf_color(self) -> PerfColor: - throughput = 
self.row[Constant.TITLE.THROUGHPUT] - op_duration = self.task_duration - tp_threshold = Constant.TP_THRESHOLD - if throughput == 0: - return PerfColor.WHITE - if throughput < tp_threshold / 2 and op_duration > 20: - return PerfColor.RED - elif tp_threshold / 2 <= throughput < tp_threshold: - return PerfColor.YELLOW - else: - return PerfColor.GREEN - - -class CubeOpPerf(OpPerf): - def get_perf_color(self) -> PerfColor: - aic_mac_ratio = self.get_mac_ratio() - if not aic_mac_ratio: - print("[WARNING] There is no aic_mac_ratio, do not assess cube op performance.") - return PerfColor.WHITE - elif aic_mac_ratio < 0.6: - return PerfColor.RED - elif 0.6 <= aic_mac_ratio < 0.8: - return PerfColor.YELLOW - else: - return PerfColor.GREEN diff --git a/profiler/advisor/common/constant.py b/profiler/advisor/common/constant.py index 1d23b1d9ac..90de9fe7b0 100644 --- a/profiler/advisor/common/constant.py +++ b/profiler/advisor/common/constant.py @@ -12,217 +12,6 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. -from enum import Enum - - -class CsvTitle: - MODEL_NAME = "Model Name" - MODEL_ID = "Model ID" - TASK_ID = "Task ID" - STREAM_ID = "Stream ID" - INFER_ID = "Infer ID" - TASK_START_TIME = "Task Start Time(us)" - TASK_WAIT_TIME = "Task Wait Time(us)" - BLOCK_DIM = "Block Dim" - MIX_BLOCK_DIM = "Mix Block Dim" - HF32_ELIGIBLE = "HF32 Eligible" - INPUT_SHAPES = "Input Shapes" - INPUT_DATA_TYPES = "Input Data Types" - INPUT_FORMATS = "Input Formats" - OUTPUT_SHAPES = "Output Shapes" - OUTPUT_DATA_TYPES = "Output Data Types" - OUTPUT_FORMATS = "Output Formats" - CONTEXT_ID = "Context ID" - AICORE_TIME = "aicore_time(us)" - AIC_TOTAL_CYCLES = "aic_total_cycles" - AIC_MAC_TIME = "aic_mac_time(us)" - AIC_MAC_RATIO = "aic_mac_ratio" - AIC_SCALAR_TIME = "aic_scalar_time(us)" - AIC_SCALAR_RATIO = "aic_scalar_ratio" - AIC_MTE1_TIME = "aic_mte1_time(us)" - AIC_MTE1_RATIO = "aic_mte1_ratio" - AIC_MTE2_TIME = "aic_mte2_time(us)" - AIC_MTE2_RATIO = "aic_mte2_ratio" - AIC_FIXPIPE_TIME = "aic_fixpipe_time(us)" - AIC_FIXPIPE_RATIO = "aic_fixpipe_ratio" - AIC_ICACHE_MISS_RATE = "aic_icache_miss_rate" - AIV_TIME = "aiv_time(us)" - AIV_TOTAL_CYCLES = "aiv_total_cycles" - AIV_VEC_TIME = "aiv_vec_time(us)" - AIV_VEC_RATIO = "aiv_vec_ratio" - AIV_SCALAR_TIME = "aiv_scalar_time(us)" - AIV_SCALAR_RATIO = "aiv_scalar_ratio" - AIV_MTE2_TIME = "aiv_mte2_time(us)" - AIV_MTE2_RATIO = "aiv_mte2_ratio" - AIV_MTE3_TIME = "aiv_mte3_time(us)" - AIV_MTE3_RATIO = "aiv_mte3_ratio" - AIV_ICACHE_MISS_RATE = "aiv_icache_miss_rate" - CUBE_UTILIZATION = "cube_utilization( %)" - TASK_DURATION_SUM = "Task Duration Sum(us)" - TASK_DURATION_MEAN = "Task Duration Mean(us)" - TASK_DURATION_STD = "Task Duration Std(us)" - TASK_DURATION_RATIO = "Task Duration Ratio(100%)" - SIZE = "size(MB)" - THROUGHPUT = "throughput(GB/s)" - COLOR = "color" - GAP = "Gap(us)" - DURATION_SUM = "Duration Sum(us)" - COUNT = "Count" - MAX_DURATION = "Max Duration(us)" - MIN_DURATION = "Min Duration(us)" - AVG_DURATION = "Avg Duration(us)" - DURATION_RATIO = "Duration Ratio" - INDEX = "Index" - - -# 定义CSV_TITILE_V1类,继承自CSV_TITILE类, 适配旧版csv -class CsvTitleV1(CsvTitle): - OP_NAME = "Op Name" - OP_TYPE = "OP Type" - TASK_TYPE = "Task Type" - TASK_DURATION = "Task Duration(us)" - - -# 定义CSV_TITILE_V1类,继承自CSV_TITILE类, 适配新版csv -class CsvTitleV2(CsvTitle): - OP_NAME = "Name" - OP_TYPE = "Type" - TASK_TYPE = "Accelerator Core" - TASK_DURATION = 
"Duration(us)" - - -class Constant: - DTYPE_SIZE_MAP = {"int8": 1, "uint8": 1, - "int16": 2, "uint16": 2, - "int32": 4, "uint32": 4, - "int64": 8, "uint64": 8, - "float16": 2, - "bfloat16": 2, - "bf16": 2, - "dt_bf16": 2, - "float32": 4, - "float": 4, - "float64": 8, - "complex64": 8, - "complex128": 16, - "bool": 1} - TP_THRESHOLD = 1150 - MAX_INPUT_MODE_LEN = 30 - MAX_INPUT_ADVICE_LEN = 30 - SMALL_OP_DUR_RATIO = 0.2 - SMALL_OP_NUM_RATIO = 0.2 - BYTE_UNIT_TRANS = 1024 - UNIT_TRANS = 1000 - - # mode list - COMPUTE = "compute" - TIMELINE = "timeline" - CLUSTER = "cluster" - OVERALL = "overall" - PIPELINE = "pipeline" - - # advice list - SLOW_RANK = "slow rank" - SLOW_LINK = "slow link" - KERNEL = "kernel" - - # compute - NPU_FUSED = "npu_fused" - NPU_SLOW = "npu_slow" - - # timeline - OPTIM = "optimizer" - OP_SCHE = "op_schedule" - - # overall - SUMMARY = "summary" - - PT_PROF_SUFFIX = "ascend_pt" - ASCEND_PROFILER_OUTPUT = "ASCEND_PROFILER_OUTPUT" - COLLECTION_PATH = "collection_path" - CLUSTER_ANALYSIS_OUTPUT = "cluster_analysis_output" - KERNEL_DETAILS_CSV = "kernel_details.csv" - CLUSTER_STEP_TIME_CSV = "cluster_step_trace_time.csv" - CLUSTER_COMM_JSON = "cluster_communication.json" - - # pipline - OP_NAME = "name" - OP_TID = "tid" - PID = "pid" - TS = "ts" - DUR = "dur" - CAT = "cat" - ARGS = "args" - PH = "ph" - ID = "id" - PH_START = "s" - PH_BEGIN = "B" - PH_END = "E" - PH_META = "M" - PH_X = "X" - CNAME = "cname" - PROCESS_NAME = "process_name" - FRAMEWORK_NAME = "Python" - ASCEND_HARDWARE_NAME = "Ascend Hardware" - ASYNC_NPU = "async_npu" - STEP_PREFIX = "ProfilerStep#" - FP_ATEN_OP = "aten" - FP_C10D_OP = "c10d" - HCOM_OP_PREFIX = "hcom_" - BP_AUTOGRAD_OP = "autograd" - TRACE_VIEW_JSON = "trace_view.json" - - # pattern_dict key: pattern, value: pattern name - PATTERN_DICT = {("Add", "DropOutDoMask", "Add"): "bias_dropout_add", - ("BatchMatMul", "Mul", "Cast", "Mul", "MaskedFill", "SoftmaxV2", "Cast", "DropOutDoMask", - "AsStrided", "BatchMatMul", "Transpose"): "FA", - ("Transpose", "Transpose", "Transpose", "Mul", "Transpose", "BatchMatMulV2", "MaskedFill", - "Cast", "SoftmaxV2", "Cast", "DropOutDoMask", "BatchMatMulV2", "Transpose"): "FA", - ("Transpose", "BatchMatMulV2", "Transpose", "Transpose", "BatchMatMulV2", "ZerosLike", - "DropOutDoMask", "Cast", "SoftmaxGrad", "Cast", "MaskedFill", "BatchMatMulV2", - "BatchMatMulV2", "Mul"): "FA", - ("Cast", "Square", "ReduceMeanD", "Add", "Rsqrt", "Cast", "Cast", "Mul", "Cast", "Cast", - "Mul", "Cast"): "RMSNORM", - ("Cast", "LayerNorm", "Cast"): "LayerNorm", - ("Add", "LayerNorm"): "AddLayerNorm", - ("Add", "LayerNormV3"): "AddLayerNorm", - ("Gelu", "Add"): "GeluAdd", - ("Cast", "Square", "MemSet", "ReduceMean", "Add", "Rsqrt", "Mul", "Cast", "Mul"): "RMSNorm", - ("BatchMatMul", "RealDiv", "Add", "Maximum", "SoftmaxV2", "Cast", "BatchMatMul"): "FA", - ("BatchMatMulV2", "RealDiv", "Add", "Cast", "Maximum", "Cast", "SoftmaxV2", "AsStrided", - "BatchMatMulV2"): "FA", - ("BatchMatMulV2", "RealDiv", "Add", "Cast", "SoftmaxV2", "Cast", "BroadcastTo", - "BatchMatMulV2"): "FA", - ("Mul", "Slice", "Neg", "Slice", "ConcatD", "Cast", "Mul", "Add"): "RotaryMul", - ("Mul", "AsStrided", "Neg", "AsStrided", "ConcatD", "Mul", "Add"): "RotaryMul", - ("Mul", "Slice", "Neg", "Slice", "ConcatD", "Mul", "Add"): "RotaryMul", - ("MatMulV2", "Swish", "MatMulV2", "Mul", "MatMulV2"): "FFN", - ("Transpose", "Transpose", "GatherElement", "Transpose"): "GatherElement", - ("Slice", "Slice", "Swish", "Mul"): "torch_npu.npu_swiglu", - ("Cast", "Mul", "MaskedFill", 
"SoftmaxV2", "Cast"): "torch_npu.npu_scaled_masked_softmax", - ("Mul", "Slice", "Neg", "Slice", "ConcatD", "Mul"): "torch_npu.npu_rotary_mul", - ("Cast", "Square", "ReduceMeanD", "Add", "Rsqrt", "Mul", "Cast", "Mul"): "torch_npu.npu_rms_norm"} - TITLE = CsvTitleV2 - - @classmethod - def update_title(cls): - cls.TITLE = CsvTitleV1 - - -class CoreType: - AIV = "AI_VECTOR_CORE" - AIC = "AI_CORE" - AICPU = "AI_CPU" - MIX_AIV = "MIX_AIV" - MIX_AIC = "MIX_AIC" - HCCL = "HCCL" - - -class PerfColor(Enum): - WHITE = 0 - GREEN = 1 - YELLOW = 2 - RED = 3 # timeline DEQUEUE = "Dequeue" diff --git a/profiler/advisor/common/trace_view_json.py b/profiler/advisor/common/trace_view_json.py deleted file mode 100644 index 8171f06ee2..0000000000 --- a/profiler/advisor/common/trace_view_json.py +++ /dev/null @@ -1,209 +0,0 @@ -# Copyright (c) 2024, Huawei Technologies Co., Ltd. -# All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -import os -from abc import abstractmethod -from dataclasses import dataclass -from dataclasses import field -from typing import Dict -from typing import List - -import pandas as pd - -from common_func.file_manager import FileManager - - -@dataclass -class TraceObj: - ph: str = "" - bp: str = "" - cat: str = "" - name: str = "" - pid: int = 0 - tid: int = 0 - id: int = 0 - ts: str = "" - dur: float = 0.0 - args: dict = field(default='unknown') - - @abstractmethod - def hash(self): - raise Exception("To be implemented") - - def valid(self): - return self.name != "" - - def check_hashable(self): - if not self.valid(): - raise Exception("Illegal {} to hash".format(self.__class__.name)) - - -@dataclass -class Process(TraceObj): - def hash(self): - self.check_hashable() - # msprof 保证name唯一性 - return self.args.get("name") - - -@dataclass -class Thread(TraceObj): - def hash(self): - self.check_hashable() - # msprof 保证name唯一性 - return self.args.get("name") - - -@dataclass -class DurationEvent(TraceObj): - def hash(self): - self.check_hashable() - return self.ts - - -@dataclass -class FlowEvent(TraceObj): - s_point_ts: str = "" - e_point_ts: str = "" - - def hash(self): - self.check_hashable() - return self.e_point_ts - - -class TraceViewJson: - - def __init__(self, path): - self.processes: Dict[str, Process] = dict() - self.threads: Dict[str, Thread] = dict() - self.python_dur_events: Dict[str, DurationEvent] = dict() - self.cann_dur_events: Dict[str, DurationEvent] = dict() - self.ascend_hardware_dur_events: Dict[str, DurationEvent] = dict() - self.torch_2_npu_flow_events: Dict[str, FlowEvent] = dict() - traces = FileManager.read_json_file(path) - self._load_obj(traces) - - def get_call_stack(self, data: pd.DataFrame, index_id: int, ts_col: str) -> str: - if ts_col not in data.columns.tolist(): - print("[ERROR] No {} col found in data columns.".format(ts_col)) - return "" - row = data.loc[index_id] - timestamp = row[ts_col] - flow_event = self.get_torch_2_npu_flow_event(timestamp) - if not flow_event.valid(): - print("[ERROR] Get flow event failed for pattern 
{}.".format(row['pattern'])) - return "" - flow_event_s_key = flow_event.s_point_ts - python_dur_events = self.get_python_dur_events_contain_ts(flow_event_s_key) - if not python_dur_events: - print("[ERROR] No python dur event found for pattern {}.".format(row['pattern'])) - return "" - # 保持新老版本callstack兼容性 - if python_dur_events[0].args.get("Call stack"): - # 旧版本 - call_stack_list = python_dur_events[0].args.get("Call stack").split(";") - else: - python_dur_events.sort(key=lambda e: e.ts) - # 新版本 - call_stack_list = [event.name for event in python_dur_events if event.cat == "python_function"] - call_stack = "\n".join(call_stack_list) - return call_stack - - def get_torch_2_npu_flow_event(self, end_time) -> FlowEvent: - if not self.torch_2_npu_flow_events or not self.torch_2_npu_flow_events.get(end_time): - print("[ERROR] Find flow event failed for ts: {}".format(end_time)) - return FlowEvent() - return self.torch_2_npu_flow_events.get(end_time) - - def get_python_dur_events_contain_ts(self, ts) -> List[DurationEvent]: - res = [] - for event in self.python_dur_events.values(): - if float(event.ts) <= float(ts) <= float(event.ts) + event.dur: - res.append(event) - return res - - def _load_obj(self, traces): - self._load_format(traces) - if not self._check_format(): - print("[ERROR] parse json failed for error format") - return - self._load_duration_events(traces) - self._load_torch_to_npu_flow_events(traces) - - def _check_format(self): - # 当前功能只需要这两个process,可扩展 - check_processes = ['Python', 'Ascend Hardware'] - for check_process in check_processes: - if check_process in self.processes: - continue - print("[ERROR] {} process not found in json.".format(check_process)) - return False - return True - - # 加载pid, tid头 - def _load_format(self, traces: List[Dict]): - for i, trace in enumerate(traces): - if trace.get('name') == 'process_name': - if not trace.get('args') or not trace.get('args').get('name') or not trace.get('pid'): - continue - process = Process(**trace) - self.processes[process.hash()] = process - if trace.get('name') == 'thread_name': - if not trace.get('args') or not trace.get('args').get('name') or not trace.get('tid'): - continue - thread = Thread(**trace) - self.threads[thread.hash()] = thread - - def _load_duration_events(self, traces: List[Dict]): - def check_events(_trace): - return _trace.get('name') and _trace.get("ts") and _trace.get("dur") - - python_pid = self.processes.get("Python").pid - cann_pid = self.processes.get("CANN").pid - ascend_hardware_pid = self.processes.get("Ascend Hardware").pid - for i, trace in enumerate(traces): - if trace.get('ph') != 'X': - continue - if not check_events(trace): - continue - event = DurationEvent(**trace) - if trace.get('pid') == python_pid: - self.python_dur_events[event.hash()] = event - elif trace.get('pid') == cann_pid: - self.cann_dur_events[event.hash()] = event - elif trace.get("pid") == ascend_hardware_pid: - self.ascend_hardware_dur_events[event.hash()] = event - - def _load_torch_to_npu_flow_events(self, traces: List[Dict]): - def check_events(_trace): - return _trace.get('name') and _trace.get("id") and _trace.get("ts") - - flow_events_table_by_id = dict() - - python_pid = self.processes.get("Python") - for i, trace in enumerate(traces): - if trace.get('ph') != 's' and trace.get('ph') != 'f' and trace.get('pid') != python_pid: - continue - if not check_events(trace): - continue - event = flow_events_table_by_id.get(trace.get("id")) - if not event: - event = FlowEvent(**trace) - if trace.get('ph') == 's': - 
event.s_point_ts = trace.get('ts') - else: - event.e_point_ts = trace.get('ts') - flow_events_table_by_id[event.id] = event - - self.torch_2_npu_flow_events = {eve.hash(): eve for eve in flow_events_table_by_id.values()} diff --git a/profiler/advisor/common/trace_view_preprocessor.py b/profiler/advisor/common/trace_view_preprocessor.py deleted file mode 100644 index 14a13066f6..0000000000 --- a/profiler/advisor/common/trace_view_preprocessor.py +++ /dev/null @@ -1,208 +0,0 @@ -# Copyright (c) 2023, Huawei Technologies Co., Ltd. -# All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -import re -import sys -from typing import Optional -from dataclasses import dataclass - -from profiler.advisor.common.constant import Constant - - -@dataclass -class FineTraceViewData: - py_pid: int = -1 - fp_tid: int = -1 - bp_tid: int = -1 - ascend_pid: int = -1 - min_ts: str = str(sys.maxsize) - max_ts: str = "0" - hcom_tids: list = None - fp_ops: list = None - bp_ops: list = None - hcom_ops: list = None - npu_ops_ts_dur: dict = None - torch_to_npu_links: list = None - - def __post_init__(self): - self.hcom_tids = self.hcom_tids or [] - self.fp_ops = self.fp_ops or [] - self.bp_ops = self.bp_ops or [] - self.hcom_ops = self.hcom_ops or [] - self.npu_ops_ts_dur = self.npu_ops_ts_dur or {} - self.torch_to_npu_links = self.torch_to_npu_links or [] - - def sort(self): - self.fp_ops.sort(key=lambda x: x[Constant.TS]) - self.bp_ops.sort(key=lambda x: x[Constant.TS]) - self.hcom_ops.sort(key=lambda x: x[Constant.TS]) - self.torch_to_npu_links.sort(key=lambda x: x[Constant.TS]) - - -class TraceViewPreProcessor: - """ - Trace view data preprocess - """ - - @staticmethod - def _is_fp_op(op_name: str) -> bool: - """ - check whether op is fp op - """ - return op_name.startswith(Constant.FP_ATEN_OP) or op_name.startswith(Constant.FP_C10D_OP) - - @staticmethod - def _is_fp_data(data: dict, fp_tid: int, py_pid: int) -> bool: - """ - check whether data is valid fp data - """ - return data[Constant.OP_TID] == fp_tid and \ - Constant.TS in data and Constant.DUR in data and \ - not data[Constant.OP_NAME].startswith(Constant.STEP_PREFIX) and \ - data[Constant.PID] == py_pid - - @staticmethod - def _is_bp_op(op_name: str) -> bool: - """ - check whether op is bp op - """ - return op_name.startswith(Constant.BP_AUTOGRAD_OP) - - @staticmethod - def _is_bp_data(data: dict, bp_tid: int, py_pid: int) -> bool: - """ - check whether data is valid bp data - """ - return data[Constant.OP_TID] == bp_tid and \ - Constant.TS in data and Constant.DUR in data and \ - data[Constant.PID] == py_pid - - @staticmethod - def _is_torch_to_npu_link(data: dict, fp_tid: int) -> bool: - """ - check whether data is torch to npu link - """ - return Constant.CAT in data and data[Constant.CAT] == Constant.ASYNC_NPU and \ - data[Constant.PH] == Constant.PH_START and \ - data[Constant.PID] == fp_tid - - @staticmethod - def _is_send_recv_op(op_name: str) -> bool: - """ - check whether op is hcom send or recv op - """ - # eg: 
hcom_BatchSendRecv__101_0_1 - p1 = re.compile(r'hcom_\w+SendRecv__\d+') - # eg: hcom_send__101_0_1 - p2 = re.compile(r'hcom_send__\d+') - # eg: hcom_receive__101_0_1 - p3 = re.compile(r'hcom_receive__\d+') - return bool(p1.match(op_name)) or bool(p2.match(op_name)) or bool(p3.match(op_name)) - - @staticmethod - def _is_hcom_op(op_name: str) -> bool: - """ - check whether data is hcom data - """ - return op_name.startswith(Constant.HCOM_OP_PREFIX) - - @staticmethod - def _is_python_process(data: dict) -> bool: - """ - check whether data is python process - """ - return Constant.PH in data and data[Constant.PH] == Constant.PH_META and \ - data[Constant.OP_NAME] == Constant.PROCESS_NAME and \ - data[Constant.ARGS][Constant.OP_NAME] == Constant.FRAMEWORK_NAME - - @staticmethod - def _is_step_op(data: dict) -> bool: - """ - check whether data is step data - """ - return data[Constant.OP_NAME].startswith(Constant.STEP_PREFIX) - - @staticmethod - def _is_ascend_process(data: dict) -> bool: - """ - check whether data is ascend process data - """ - return Constant.PH in data and data[Constant.PH] == Constant.PH_META and \ - data[Constant.OP_NAME] == Constant.PROCESS_NAME and \ - data[Constant.ARGS][Constant.OP_NAME] == Constant.ASCEND_HARDWARE_NAME - - @staticmethod - def _is_npu_op(data: dict, ascend_pid: int) -> bool: - """ - check whether data is npu op - """ - return Constant.PH in data and data[Constant.PH] == Constant.PH_X and \ - not data[Constant.OP_NAME].isupper() and \ - data[Constant.PID] == ascend_pid - - def process(self, raw_data: list) -> Optional[FineTraceViewData]: - """ - preprocess raw data - """ - if not raw_data: - print("[ERROR] No raw data found in trace view data.") - return None - - raw_fp_tids, raw_bp_tids, raw_hcom_tids = set(), set(), set() - fine_data = FineTraceViewData() - - # counting fp ops and bp ops tid and ascend pid - for data in raw_data: - if self._is_fp_op(data[Constant.OP_NAME]): - raw_fp_tids.add(data[Constant.OP_TID]) - elif self._is_bp_op(data[Constant.OP_NAME]): - raw_bp_tids.add(data[Constant.OP_TID]) - elif self._is_send_recv_op(data[Constant.OP_NAME]): - fine_data.hcom_ops.append(data) - raw_hcom_tids.add(data[Constant.OP_TID]) - elif self._is_python_process(data): - fine_data.py_pid = data[Constant.PID] - elif self._is_ascend_process(data): - fine_data.ascend_pid = data[Constant.PID] - - # find max and min ts in hcom ops - if self._is_hcom_op(data[Constant.OP_NAME]): - # for compatibility with old data (ts is float type) - ts = data[Constant.TS] if not isinstance(data[Constant.TS], float) else str(data[Constant.TS]) - fine_data.min_ts = min(fine_data.min_ts, ts) - fine_data.max_ts = max(fine_data.max_ts, ts) - - unique_fp_tid = list(raw_fp_tids - raw_bp_tids) - unique_bp_tid = list(raw_bp_tids) - fine_data.hcom_tids = list(raw_hcom_tids) - - if not unique_fp_tid or not unique_bp_tid: - print("[INFO] No fp or bp tid found in trace view data.") - else: - fine_data.fp_tid, fine_data.bp_tid = unique_fp_tid[0], unique_bp_tid[0] - - # filter fp ops and bp ops and torch_to_npu_links - for data in raw_data: - if self._is_fp_data(data, fine_data.fp_tid, fine_data.py_pid): - fine_data.fp_ops.append(data) - elif self._is_bp_data(data, fine_data.bp_tid, fine_data.py_pid): - fine_data.bp_ops.append(data) - elif self._is_torch_to_npu_link(data, fine_data.fp_tid): - fine_data.torch_to_npu_links.append(data) - elif self._is_npu_op(data, fine_data.ascend_pid): - fine_data.npu_ops_ts_dur[data[Constant.TS]] = data[Constant.DUR] - - fine_data.sort() - return fine_data 
diff --git a/profiler/advisor/interface/interface.py b/profiler/advisor/interface/interface.py index aedbd2e116..3bb3d3f5a8 100644 --- a/profiler/advisor/interface/interface.py +++ b/profiler/advisor/interface/interface.py @@ -8,7 +8,6 @@ from profiler.advisor.utils.utils import Timer from profiler.advisor.analyzer.cluster.slow_rank_analyser import SlowRankAnalyzer from profiler.advisor.analyzer.cluster.slow_link_analyser import SlowLinkAnalyzer from profiler.advisor.analyzer.overall.overall_summary_analyzer import OverallSummaryAnalyzer -from profiler.advisor.analyzer.computation.npu_fused.npu_slow_advice import NpuSlowAnalyzer class Interface: supported_analyzer = { -- Gitee From 4e173466d9b1a0e8ab1e3915298cd224dba65041 Mon Sep 17 00:00:00 2001 From: fanxiaotong Date: Fri, 17 May 2024 17:46:38 +0800 Subject: [PATCH 15/21] advisor --- .../compute_advice/test_npu_slow_advice.py | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/profiler/test/ut/advisor/advisor_backend/compute_advice/test_npu_slow_advice.py b/profiler/test/ut/advisor/advisor_backend/compute_advice/test_npu_slow_advice.py index 894367d070..8830d49599 100644 --- a/profiler/test/ut/advisor/advisor_backend/compute_advice/test_npu_slow_advice.py +++ b/profiler/test/ut/advisor/advisor_backend/compute_advice/test_npu_slow_advice.py @@ -6,7 +6,7 @@ import csv import unittest from advisor_backend.interface import Interface -from advisor_backend.compute_advice.npu_slow_advice import +from advisor_backend.compute_advice.npu_slow_advice import NpuSlowAdvice class TestNpuSlowAdvice(unittest.TestCase): @@ -186,7 +186,7 @@ class TestNpuSlowAdvice(unittest.TestCase): self.create_kernel_details() interface = Interface(self.ASCEND_PT_DIR) data = interface.get_data('compute', 'npu_slow') - call_stack = (self.ASCEND_PT_DIR).get_call_stack(data, index_id=0, ts_col="Start Time(us)") + call_stack = NpuSlowAdvice(self.ASCEND_PT_DIR).get_call_stack(data, index_id=0, ts_col="Start Time(us)") self.assertEqual(9, len(data)) self.assertEqual("", call_stack) @@ -197,8 +197,8 @@ class TestNpuSlowAdvice(unittest.TestCase): interface = Interface(self.ASCEND_PT_DIR) data = interface.get_data('compute', 'npu_slow') slow_op_data = data[data["color"] == "RED"] - .save_to_excel(data, file_path=os.path.join(self.ASCEND_PT_DIR, "slow_op.xlsx")) - call_stack = (self.ASCEND_PT_DIR).get_call_stack(data, index_id=0, ts_col="Start Time(us)") + NpuSlowAdvice.save_to_excel(data, file_path=os.path.join(self.ASCEND_PT_DIR, "slow_op.xlsx")) + call_stack = NpuSlowAdvice(self.ASCEND_PT_DIR).get_call_stack(data, index_id=0, ts_col="Start Time(us)") self.assertEqual(9, len(data)) self.assertEqual(2, len(slow_op_data)) print(call_stack) @@ -213,8 +213,8 @@ class TestNpuSlowAdvice(unittest.TestCase): interface = Interface(self.ASCEND_PT_DIR) data = interface.get_data('compute', 'npu_slow') slow_op_data = data[data["color"] == "RED"] - .save_to_excel(data, file_path=os.path.join(self.ASCEND_PT_DIR, "slow_op.xlsx")) - call_stack = (self.ASCEND_PT_DIR).get_call_stack(data, index_id=0, ts_col="Start Time(us)") + NpuSlowAdvice.save_to_excel(data, file_path=os.path.join(self.ASCEND_PT_DIR, "slow_op.xlsx")) + call_stack = NpuSlowAdvice(self.ASCEND_PT_DIR).get_call_stack(data, index_id=0, ts_col="Start Time(us)") self.assertEqual(9, len(data)) self.assertEqual(2, len(slow_op_data)) print(call_stack) -- Gitee From 690955af98bf5a127b20fd48df75fb3ca16e4007 Mon Sep 17 00:00:00 2001 From: renlei Date: Sat, 18 May 2024 11:01:09 +0800 Subject: [PATCH 16/21] aicpu 
rules support cann8.0.0 version --- .../computation/aicpu/aicpu_checker.py | 6 +-- .../op_compile/dynamic_shape_checker.py | 41 +++++++++++-------- .../analyzer/computation/operator_checker.py | 2 +- profiler/advisor/common/constant.py | 2 +- .../config/profiling_data_version_config.yaml | 6 +-- profiler/advisor/rules/aicpu_rules.yaml | 36 ++++++++-------- profiler/advisor/utils/utils.py | 2 + profiler/test/tools/tool.py | 8 ---- 8 files changed, 51 insertions(+), 52 deletions(-) diff --git a/profiler/advisor/analyzer/computation/aicpu/aicpu_checker.py b/profiler/advisor/analyzer/computation/aicpu/aicpu_checker.py index 4654d97225..58dfa9bb08 100644 --- a/profiler/advisor/analyzer/computation/aicpu/aicpu_checker.py +++ b/profiler/advisor/analyzer/computation/aicpu/aicpu_checker.py @@ -42,7 +42,7 @@ class AicpuChecker(OperatorChecker): def load_aicpu_rules(self, rule_path="rules/aicpu_rules.yaml") -> Dict: if not os.path.isabs(rule_path): rule_path = os.path.join(os.path.dirname(__file__), - "../../computation/", "../", rule_path) + "../../../", rule_path) if not os.path.exists(rule_path): logger.warning("Skip analyze aicpu issues, because %s does not exist.", rule_path) @@ -64,8 +64,8 @@ class AicpuChecker(OperatorChecker): support_checkers = [] for checkers in aicpu_rules['CommonChecker']: for key, value in checkers.items(): - if key == 'DataTypeChecker' and value['cann_version'] != self.cann_verson: continue - support_checkers.append(checkers) + if key == 'DataTypeChecker' and self.cann_version in value['cann_version']: + support_checkers.append(checkers) aicpu_rules['CommonChecker'] = support_checkers return diff --git a/profiler/advisor/analyzer/computation/op_compile/dynamic_shape_checker.py b/profiler/advisor/analyzer/computation/op_compile/dynamic_shape_checker.py index 4d405eb918..902f8b7f66 100644 --- a/profiler/advisor/analyzer/computation/op_compile/dynamic_shape_checker.py +++ b/profiler/advisor/analyzer/computation/op_compile/dynamic_shape_checker.py @@ -22,26 +22,35 @@ class DynamicShapeChecker(OperatorChecker): _op_views: List = [] def __init__(self, cann_version) -> None: - super().__init__(cann_version = cann_version) + super().__init__(cann_version=cann_version) def check(self, profiling_database) -> bool: + less_than_cann800_list = [constant.CANN_VERSION_C30, constant.CANN_VERSION_C13, constant.CANN_VERSION_C15] # CANN 8.0.0 之前从 ge_info 中获取 op_state 属性,进行动态 shape 逻辑判断 - if hasattr(profiling_database, "ge_info") and profiling_database.ge_info: - ge_info = profiling_database.ge_info - static_shape_operators = ge_info.get_static_shape_operators() - if len(static_shape_operators) == 0: - OperatorChecker.IS_ALL_OPERATOR_DYNAMIC_SHAPE = True - return True - # CANN 8.0.0 之后 op_state 属性从 op_summary 文件中获取 - elif hasattr(profiling_database, "op_summary"): - static_shape_operators = profiling_database.op_summary.get_static_shape_operators() - if len(static_shape_operators) == 0: - OperatorChecker.IS_ALL_OPERATOR_DYNAMIC_SHAPE = True - return True + if self.cann_version in less_than_cann800_list: + if hasattr(profiling_database, "ge_info") and profiling_database.ge_info.op_state_info_list is not None: + ge_info = profiling_database.ge_info + static_shape_operators = ge_info.get_static_shape_operators() + if len(static_shape_operators) == 0: + OperatorChecker.IS_ALL_OPERATOR_DYNAMIC_SHAPE = True + return True + else: + logger.warning( + "Skip dynamic shape checker because of not containing ge_info.db file in host filefloder.\n" + "To enable dynamic shape checker, please try to set 
data_simplification=False in experimental_config.\n" + "More details please refer to link : %s", constant.ASCEND_PROFILER_URL) else: - logger.warning("Skip dynamic shape checker because of not containing ge_info.db file in host filefloder.\n" - "To enable dynamic shape checker, please try to set data_simplification=False in experimental_config.\n" - "More details please refer to link : %s", constant.ASCEND_PROFILER_URL) + # CANN 8.0.0 之后 op_state 属性从 op_summary 文件中获取 + if hasattr(profiling_database, "op_summary") and len(profiling_database.op_summary.op_list) > 0: + static_shape_operators = profiling_database.op_summary.get_static_shape_operators() + if len(static_shape_operators) == 0: + OperatorChecker.IS_ALL_OPERATOR_DYNAMIC_SHAPE = True + return True + else: + logger.warning( + "Skip dynamic shape checker because of not containing op_summary.csv file in current filefloder." + ) + return False def make_record(self, profiling_database) -> OptimizeRecord: diff --git a/profiler/advisor/analyzer/computation/operator_checker.py b/profiler/advisor/analyzer/computation/operator_checker.py index e8490ff206..d714e0952c 100644 --- a/profiler/advisor/analyzer/computation/operator_checker.py +++ b/profiler/advisor/analyzer/computation/operator_checker.py @@ -38,7 +38,7 @@ class OperatorChecker(VersionControl): _tune_op_list: List[str] = [] def __init__(self, cann_version: str): - self.cann_verson = cann_version + self.cann_version = cann_version self._op_list: List[OpInfo] = [] def check(self, profiling_data: ProfilingDataset) -> bool: diff --git a/profiler/advisor/common/constant.py b/profiler/advisor/common/constant.py index 664753c724..fee5ed9f81 100644 --- a/profiler/advisor/common/constant.py +++ b/profiler/advisor/common/constant.py @@ -32,7 +32,7 @@ NO_STACK_REASON_MAP = { TIMELINE_BACKWARD_NO_STACK_CODE: "Backward broadcast, without call stacks in profiling.", TIMELINE_ACL_TO_NPU_NO_STACK_CODE: "Incoming flow is 'acl_to_npu', without call stacks in profiling." } -TIMELINE_API_DOC_URL = "https://3ms.huawei.com/hi/group/3942456/wiki_7680982.html" +TIMELINE_API_DOC_URL = "https://support.huaweicloud.com/bestpractice-modelarts/modelarts_10_2516.html" AFFINITY_TRAINING_API = "Affinity training api" TIMELINE_WITH_STACK_DOC_URL = "https://www.hiascend.com/document/detail/zh/canncommercial/" \ "70RC1/modeldevpt/ptmigr/AImpug_0067.html" diff --git a/profiler/advisor/config/profiling_data_version_config.yaml b/profiler/advisor/config/profiling_data_version_config.yaml index 45f4b5c0f7..f73aecd3ba 100644 --- a/profiler/advisor/config/profiling_data_version_config.yaml +++ b/profiler/advisor/config/profiling_data_version_config.yaml @@ -72,9 +72,9 @@ versions: msprof: Msprof ge_info: GeInfo file_attr: - op_summary: ^op_summary_\d+_\d+_\.csv$ - task_time: ^task_time_\d+_\d+_\.json$ - msprof: ^msprof_\d+_\d+_\.json$ + op_summary: ^op_summary_\d+_\d+\.csv$ + task_time: ^task_time_\d+_\d+\.json$ + msprof: ^msprof_\d+_\d+\.json$ ge_info: ge_info.db diff --git a/profiler/advisor/rules/aicpu_rules.yaml b/profiler/advisor/rules/aicpu_rules.yaml index 053f4150e8..9313700c80 100644 --- a/profiler/advisor/rules/aicpu_rules.yaml +++ b/profiler/advisor/rules/aicpu_rules.yaml @@ -1,8 +1,9 @@ DataTypeSuggeation: &DataTypeSuggeation "Data type {} in {} operator may cause AICPU issues, Try to convert to {} if possible." 
+AICPU_DOC_URL: &AICPU_DOC_URL "https://support.huaweicloud.com/bestpractice-modelarts/modelarts_10_2517.html" CommonChecker: - DataTypeChecker: - cann_version: 7.0.RC1 + cann_version: [7.0.RC1] op_type: [ __ALL__ ] ignore_type: [ cast, tensorequal, equal, nonzero, mul ] input: [ float, float32, float16, bool, int32, uint32, int64, uint64, int8, uint8, int16, uint16, dt_bf16 ] @@ -10,42 +11,42 @@ CommonChecker: suggestion: *DataTypeSuggeation - DataTypeChecker: - cann_version: 7.0.RC1 + cann_version: [7.0.RC1] op_type: [ cast ] input: [ float, float32, float16, bool, int32, uint32, int64, uint64, uint8, dt_bf16 ] output: [ float, float32, float16, bool, int32, uint32, int64, uint64, uint8, dt_bf16 ] suggestion: *DataTypeSuggeation - DataTypeChecker: - cann_version: 7.0.RC1 + cann_version: [7.0.RC1] op_type: [ tensorequal ] input: [ float, float32, float16, bool, int32, int8, uint8 ] output: [ bool ] suggestion: *DataTypeSuggeation - DataTypeChecker: - cann_version: 7.0.RC1 + cann_version: [7.0.RC1] op_type: [ equal ] input: [ float, float32, float16, bool, int32, int64, int8, uint8 ] output: [ bool ] suggestion: *DataTypeSuggeation - DataTypeChecker: - cann_version: 7.0.RC1 + cann_version: [7.0.RC1] op_type: [ nonzero ] input: [ float16, bool, dt_bf16 ] output: [ int64 ] suggestion: *DataTypeSuggeation - DataTypeChecker: - cann_version: 7.0.RC1 + cann_version: [7.0.RC1] op_type: [ mul ] input: [ float, float32, float16, bool, int32, uint32, int64, uint64, int8, uint8, dt_bf16 ] output: [ float, float32, float16, bool, int32, uint32, int64, uint64, int8, uint8, dt_bf16 ] suggestion: *DataTypeSuggeation - DataTypeChecker: - cann_version: 7.0.0 + cann_version: [8.0.0, 7.0.0] op_type: [ __ALL__ ] ignore_type: [ cast, tensorequal, equal, nonzero, mul ] input: [ float, float32, float16, dt_bf16, float64, bool, int32, int64, int8, uint8, int16, complex64, complex128 ] @@ -53,28 +54,28 @@ CommonChecker: suggestion: *DataTypeSuggeation - DataTypeChecker: - cann_version: 7.0.0 + cann_version: [8.0.0, 7.0.0] op_type: [ cast ] input: [ float, float32, float16, bool, int32, uint32, int64, uint64, uint8, dt_bf16 ] output: [ float, float32, float16, bool, int32, uint32, int64, uint64, uint8, dt_bf16 ] suggestion: *DataTypeSuggeation - DataTypeChecker: - cann_version: 7.0.0 + cann_version: [8.0.0, 7.0.0] op_type: [ tensorequal ] input: [ float, float32, float16, dt_bf16, float64, bool, int32, int8, uint8 ] output: [ bool ] suggestion: *DataTypeSuggeation - DataTypeChecker: - cann_version: 7.0.0 + cann_version: [8.0.0, 7.0.0] op_type: [ equal ] input: [ float, float32, float16, dt_bf16, float64, bool, int32, int64, int8, uint8 ] output: [ bool ] suggestion: *DataTypeSuggeation - DataTypeChecker: - cann_version: 7.0.0 + cann_version: [8.0.0, 7.0.0] op_type: [ mul ] input: [ float, float32, float16, dt_bf16, float64, bool, int32, int64, int8, uint8, complex64 ] output: [ float, float32, float16, dt_bf16, float64, bool, int32, int64, int8, uint8, complex64 ] @@ -83,25 +84,20 @@ CommonChecker: ExampleGuideChecker: - IndexPutChecker: op_type: [index] - url: "https://wiki.huawei.com/domains/41510/wiki/76339/WIKI202311152358721?title=Index" + url: *AICPU_DOC_URL suggestion: 'Please modify source code followed by this LINK, try to replace index operator with equivalent operator.' 
- NonzeroChecker: op_type: [ indexput, indexputv2 ] - url: "https://wiki.huawei.com/domains/41510/wiki/76339/WIKI202311152358721?title=IndexPut" + url: *AICPU_DOC_URL suggestion: 'Please modify source code followed by this LINK, try to replace indexput operator with equivalent operator.' - CastChecker: op_type: [ argmin ] - url: "https://wiki.huawei.com/domains/41510/wiki/76339/WIKI202311152358721?title=ArgMin" + url: *AICPU_DOC_URL suggestion: 'Please update your cann-tookit to at least 7.0.RC1 version by this LINK.' - - CastChecker: - op_type: [ unique ] - url: "https://wiki.huawei.com/domains/41510/wiki/76339/WIKI202311152358721?title=unique" - suggestion: 'Please modify source code followed by this LINK, try to replace unique operator with equivalent operator.' - - CastChecker: op_type: [ nonzero ] - url: "https://wiki.huawei.com/domains/41510/wiki/76339/WIKI202311152358721?title=unique" + url: *AICPU_DOC_URL suggestion: 'Please modify source code followed by this LINK, try to replace nonzero operator with equivalent operator.' \ No newline at end of file diff --git a/profiler/advisor/utils/utils.py b/profiler/advisor/utils/utils.py index f338fc7dd4..638173c209 100644 --- a/profiler/advisor/utils/utils.py +++ b/profiler/advisor/utils/utils.py @@ -395,10 +395,12 @@ def join_prof_path(root_dir: str, sub_dir: str) -> str: for root, _, _ in os.walk(root_dir, onerror=walk_error_handler): if re.match(sub_dir, os.path.basename(root)): return root + logger.error("Fail to get profiling path %s from local path %s by regular expression matching", sub_dir, root_dir) else: sub_dir = os.path.join(root_dir, sub_dir) if os.path.exists(sub_dir): return sub_dir + logger.error("Fail to get profiling path %s from local path %s", sub_dir, root_dir) return "" diff --git a/profiler/test/tools/tool.py b/profiler/test/tools/tool.py index ee4b6f9bb1..574c371270 100644 --- a/profiler/test/tools/tool.py +++ b/profiler/test/tools/tool.py @@ -28,11 +28,3 @@ def run_command(cmd): p = Popen(shlex.split(cmd, posix=False), stdout=PIPE, bufsize=0, universal_newlines=False) p.wait() - -def init_env(): - test_dir = os.path.abspath(os.path.join(os.path.dirname(__file__), "../", "data", - "asight-0.3.52.dev0+ge3f3b41.d20231111-py3-none-any.whl")) - try: - import asight - except Exception: - run_command(f"pip install {test_dir}") -- Gitee From c25fdd529e2406bd228cc772f84246841abd5aa2 Mon Sep 17 00:00:00 2001 From: shpity Date: Mon, 20 May 2024 20:06:35 +0800 Subject: [PATCH 17/21] fix bug on analyze all without graph data --- .../computation/op_compile/dynamic_shape_checker.py | 4 ++-- profiler/advisor/analyzer/computation/operator_checker.py | 2 +- .../advisor/analyzer/graph_fusion/graph_fusion_analyzer.py | 2 ++ profiler/advisor/dataset/graph_dataset.py | 6 +++--- profiler/advisor/dataset/profiling/profiling_dataset.py | 7 +++++-- profiler/advisor/utils/utils.py | 4 ++-- profiler/cli/entrance.py | 2 +- 7 files changed, 16 insertions(+), 11 deletions(-) diff --git a/profiler/advisor/analyzer/computation/op_compile/dynamic_shape_checker.py b/profiler/advisor/analyzer/computation/op_compile/dynamic_shape_checker.py index 7a3f718259..070b3a3b57 100644 --- a/profiler/advisor/analyzer/computation/op_compile/dynamic_shape_checker.py +++ b/profiler/advisor/analyzer/computation/op_compile/dynamic_shape_checker.py @@ -28,7 +28,7 @@ class DynamicShapeChecker(OperatorChecker): less_than_cann800_list = [constant.CANN_VERSION_C30, constant.CANN_VERSION_C13, constant.CANN_VERSION_C15] # CANN 8.0.0 之前从 ge_info 中获取 op_state 属性,进行动态 shape 
逻辑判断 if self.cann_version in less_than_cann800_list: - if hasattr(profiling_database, "ge_info") and profiling_database.ge_info.op_state_info_list is not None: + if hasattr(profiling_database, "ge_info"): ge_info = profiling_database.ge_info static_shape_operators = ge_info.get_static_shape_operators() if len(static_shape_operators) == 0: @@ -41,7 +41,7 @@ class DynamicShapeChecker(OperatorChecker): "More details please refer to link : %s", constant.ASCEND_PROFILER_URL) else: # CANN 8.0.0 之后 op_state 属性从 op_summary 文件中获取 - if hasattr(profiling_database, "op_summary") and len(profiling_database.op_summary.op_list) > 0: + if hasattr(profiling_database, "op_summary"): static_shape_operators = profiling_database.op_summary.get_static_shape_operators() if len(static_shape_operators) == 0: OperatorChecker.IS_ALL_OPERATOR_DYNAMIC_SHAPE = True diff --git a/profiler/advisor/analyzer/computation/operator_checker.py b/profiler/advisor/analyzer/computation/operator_checker.py index d714e0952c..6bb837004b 100644 --- a/profiler/advisor/analyzer/computation/operator_checker.py +++ b/profiler/advisor/analyzer/computation/operator_checker.py @@ -243,7 +243,7 @@ class OperatorChecker(VersionControl): return op_type_list def _check_summary(self, data: ProfilingDataset): - if not data.op_summary: + if not hasattr(data, "op_summary"): logger.warning(self.SKIP_CHECK_MSG, self._CHECKER, "op summary") return False return True diff --git a/profiler/advisor/analyzer/graph_fusion/graph_fusion_analyzer.py b/profiler/advisor/analyzer/graph_fusion/graph_fusion_analyzer.py index 059950089d..713e118429 100644 --- a/profiler/advisor/analyzer/graph_fusion/graph_fusion_analyzer.py +++ b/profiler/advisor/analyzer/graph_fusion/graph_fusion_analyzer.py @@ -31,6 +31,8 @@ class FusionOPAnalyzer(BaseAnalyzer): def _check(self, graph_data: List[GraphDataset], profiling_data: List[ProfilingDataset] = None) -> None: + if len(graph_data) == 0 or graph_data[0].is_empty(): + return for _, rule in self.RULES.items(): checker = rule() if profiling_data is None: diff --git a/profiler/advisor/dataset/graph_dataset.py b/profiler/advisor/dataset/graph_dataset.py index c6dd0448b4..951de7fd26 100644 --- a/profiler/advisor/dataset/graph_dataset.py +++ b/profiler/advisor/dataset/graph_dataset.py @@ -44,10 +44,10 @@ class GraphDataset(Dataset): graph.build() graphs.append(graph) graphs.sort(key=lambda g: g.name) - del self.graph_files[0] # remove previous useless data + if len(self.graph_files) >= 1: + del self.graph_files[0] # remove previous useless data return graphs - @property def is_empty(self) -> bool: """check empty graph dataset""" - return len(self.graphs()) == 0 + return len(self.graph_files) == 0 diff --git a/profiler/advisor/dataset/profiling/profiling_dataset.py b/profiler/advisor/dataset/profiling/profiling_dataset.py index 836f30905f..46d4a4fe8b 100644 --- a/profiler/advisor/dataset/profiling/profiling_dataset.py +++ b/profiler/advisor/dataset/profiling/profiling_dataset.py @@ -45,8 +45,11 @@ class ProfilingDataset(Dataset): data_class = globals()[self.current_version_pattern.get('class_attr').get(item)] data_class.FILE_PATTERN = self.current_version_pattern.get('file_attr').get(item) data_object = data_class(current_path) - data_object.parse_data() - setattr(self, item, data_object) + is_success = data_object.parse_data() + if is_success: + setattr(self, item, data_object) + else: + logger.warning("Skip parse %s from local path %s", self.current_version_pattern.get('class_attr').get(item), current_path) else: 
logger.warning(f"Unsupported arguments : %s to build %s", dirs_pattern, self.__class__.__name__) diff --git a/profiler/advisor/utils/utils.py b/profiler/advisor/utils/utils.py index 638173c209..84419b6708 100644 --- a/profiler/advisor/utils/utils.py +++ b/profiler/advisor/utils/utils.py @@ -395,12 +395,12 @@ def join_prof_path(root_dir: str, sub_dir: str) -> str: for root, _, _ in os.walk(root_dir, onerror=walk_error_handler): if re.match(sub_dir, os.path.basename(root)): return root - logger.error("Fail to get profiling path %s from local path %s by regular expression matching", sub_dir, root_dir) + logger.debug("Fail to get profiling path %s from local path %s by regular expression matching", sub_dir, root_dir) else: sub_dir = os.path.join(root_dir, sub_dir) if os.path.exists(sub_dir): return sub_dir - logger.error("Fail to get profiling path %s from local path %s", sub_dir, root_dir) + logger.debug("Fail to get profiling path %s from local path %s", sub_dir, root_dir) return "" diff --git a/profiler/cli/entrance.py b/profiler/cli/entrance.py index 8bc5a364ac..031fa76d6a 100644 --- a/profiler/cli/entrance.py +++ b/profiler/cli/entrance.py @@ -54,6 +54,6 @@ advisor_cli.add_command(compare_cli, name="compare") if __name__ == '__main__': advisor_cli.main( ["analyze", "all", "-d", - r"C:\Users\admin\Downloads\llama-13B\llama-13b", + "C:\xxx\profiling_data" ] ) -- Gitee From 27614fe465a34ff1a1bc9aa693b837344094f5bd Mon Sep 17 00:00:00 2001 From: wuyuhan Date: Tue, 21 May 2024 17:38:17 +0800 Subject: [PATCH 18/21] =?UTF-8?q?=E5=B1=95=E7=A4=BAupdate=E5=91=BD?= =?UTF-8?q?=E4=BB=A4,=E7=94=A8=E4=BA=8E=E8=B7=9F=E6=96=B0advisor=E8=A7=84?= =?UTF-8?q?=E5=88=99=E5=BA=93?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- MANIFEST.in | 1 + .../analyzer/cluster/__init__.py} | 0 profiler/advisor/common/constant.py | 4 +- profiler/advisor/dataset/cluster/__init__.py | 0 profiler/cli/analyze_cli.py | 41 +------------------ profiler/cli/cluster_cli.py | 27 ++++++++++++ profiler/cli/compare_cli.py | 13 ++---- profiler/cli/entrance.py | 20 ++++----- profiler/cli/update_cli.py | 40 ------------------ 9 files changed, 41 insertions(+), 105 deletions(-) rename profiler/{cli/query_cli.py => advisor/analyzer/cluster/__init__.py} (100%) create mode 100644 profiler/advisor/dataset/cluster/__init__.py delete mode 100644 profiler/cli/update_cli.py diff --git a/MANIFEST.in b/MANIFEST.in index cfadbde1db..6068a6df08 100644 --- a/MANIFEST.in +++ b/MANIFEST.in @@ -1,3 +1,4 @@ recursive-include profiler/ * +recursive-include profiler/advisor/display/html/templates * global-exclude */__pycache__/* global-exclude *.pyc diff --git a/profiler/cli/query_cli.py b/profiler/advisor/analyzer/cluster/__init__.py similarity index 100% rename from profiler/cli/query_cli.py rename to profiler/advisor/analyzer/cluster/__init__.py diff --git a/profiler/advisor/common/constant.py b/profiler/advisor/common/constant.py index 6b10b5c55a..4c3fc42ff3 100644 --- a/profiler/advisor/common/constant.py +++ b/profiler/advisor/common/constant.py @@ -113,9 +113,9 @@ COMMON_ENDPOINT_SUFFIX = "obs.{}.myhuaweicloud.com" INNER_ENDPOINT_SUFFIX= "obs.{}.ulanqab.huawei.com" AICPU_RULES_YAML_NAME = "aicpu_rules.yaml" -FUSSION_PASS_YAML_NAME = "op_fussion_pass.yaml" +FUSION_PASS_YAML_NAME = "op_fusion_pass.yaml" TIMELINE_FUSION_OPS_YAML_NAME = "timeline_fusion_ops.yaml" -CLOUD_YAML_NAME_LIST = [AICPU_RULES_YAML_NAME, FUSSION_PASS_YAML_NAME, TIMELINE_FUSION_OPS_YAML_NAME] +CLOUD_YAML_NAME_LIST = 
diff --git a/profiler/cli/entrance.py b/profiler/cli/entrance.py
index 8bc5a364ac..031fa76d6a 100644
--- a/profiler/cli/entrance.py
+++ b/profiler/cli/entrance.py
@@ -54,6 +54,6 @@ advisor_cli.add_command(compare_cli, name="compare")
 if __name__ == '__main__':
     advisor_cli.main(
         ["analyze", "all", "-d",
-         r"C:\Users\admin\Downloads\llama-13B\llama-13b",
+         "C:\xxx\profiling_data"
         ]
     )
-- 
Gitee

From 27614fe465a34ff1a1bc9aa693b837344094f5bd Mon Sep 17 00:00:00 2001
From: wuyuhan
Date: Tue, 21 May 2024 17:38:17 +0800
Subject: [PATCH 18/21] =?UTF-8?q?=E5=B1=95=E7=A4=BAupdate=E5=91=BD?=
 =?UTF-8?q?=E4=BB=A4,=E7=94=A8=E4=BA=8E=E8=B7=9F=E6=96=B0advisor=E8=A7=84?=
 =?UTF-8?q?=E5=88=99=E5=BA=93?=
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

---
 MANIFEST.in                                  |  1 +
 .../analyzer/cluster/__init__.py}            |  0
 profiler/advisor/common/constant.py          |  4 +-
 profiler/advisor/dataset/cluster/__init__.py |  0
 profiler/cli/analyze_cli.py                  | 41 +------------------
 profiler/cli/cluster_cli.py                  | 27 ++++++++++++
 profiler/cli/compare_cli.py                  | 13 ++----
 profiler/cli/entrance.py                     | 20 ++++-----
 profiler/cli/update_cli.py                   | 40 ------------------
 9 files changed, 41 insertions(+), 105 deletions(-)
 rename profiler/{cli/query_cli.py => advisor/analyzer/cluster/__init__.py} (100%)
 create mode 100644 profiler/advisor/dataset/cluster/__init__.py
 delete mode 100644 profiler/cli/update_cli.py

diff --git a/MANIFEST.in b/MANIFEST.in
index cfadbde1db..6068a6df08 100644
--- a/MANIFEST.in
+++ b/MANIFEST.in
@@ -1,3 +1,4 @@
 recursive-include profiler/ *
+recursive-include profiler/advisor/display/html/templates *
 global-exclude */__pycache__/*
 global-exclude *.pyc
diff --git a/profiler/cli/query_cli.py b/profiler/advisor/analyzer/cluster/__init__.py
similarity index 100%
rename from profiler/cli/query_cli.py
rename to profiler/advisor/analyzer/cluster/__init__.py
diff --git a/profiler/advisor/common/constant.py b/profiler/advisor/common/constant.py
index 6b10b5c55a..4c3fc42ff3 100644
--- a/profiler/advisor/common/constant.py
+++ b/profiler/advisor/common/constant.py
@@ -113,9 +113,9 @@ COMMON_ENDPOINT_SUFFIX = "obs.{}.myhuaweicloud.com"
 INNER_ENDPOINT_SUFFIX= "obs.{}.ulanqab.huawei.com"
 
 AICPU_RULES_YAML_NAME = "aicpu_rules.yaml"
-FUSSION_PASS_YAML_NAME = "op_fussion_pass.yaml"
+FUSION_PASS_YAML_NAME = "op_fusion_pass.yaml"
 TIMELINE_FUSION_OPS_YAML_NAME = "timeline_fusion_ops.yaml"
-CLOUD_YAML_NAME_LIST = [AICPU_RULES_YAML_NAME, FUSSION_PASS_YAML_NAME, TIMELINE_FUSION_OPS_YAML_NAME]
+CLOUD_YAML_NAME_LIST = [AICPU_RULES_YAML_NAME, FUSION_PASS_YAML_NAME, TIMELINE_FUSION_OPS_YAML_NAME]
 
 MAX_RETRIES = 3
 TIMEOUT = 3
diff --git a/profiler/advisor/dataset/cluster/__init__.py b/profiler/advisor/dataset/cluster/__init__.py
new file mode 100644
index 0000000000..e69de29bb2
diff --git a/profiler/cli/analyze_cli.py b/profiler/cli/analyze_cli.py
index 86af711564..8e05610064 100644
--- a/profiler/cli/analyze_cli.py
+++ b/profiler/cli/analyze_cli.py
@@ -85,25 +85,6 @@ def analyze_all(**kwargs) -> None:
     _analyze(Interface.all_dimension, **kwargs)
 
 
-@analyze_cli.command(context_settings=CONTEXT_SETTINGS,
-                     name="communication",
-                     short_help='Analyze timeline, operators and graph.')
-@click.option('--profiling_path', '-d', 'profiling_path', type=click.Path(), required=True,
-              help='Directory of profiling data')
-@click.option('--cann_version', '-cv', 'cann_version',
-              type=click.Choice(constant.SUPPORTED_CANN_VERSION, case_sensitive=False),
-              default=constant.DEFAULT_CANN_VERSION,
-              help='The CANN software version, which can be viewed by executing the following command: '
-                   '"cat /usr/local/Ascend/ascend-toolkit/latest/aarch64-linux/ascend_toolkit_install.info"')
-@click.option('--torch_version', '-tv', 'torch_version',
-              type=click.Choice(constant.SUPPORTED_TORCH_VERSION, case_sensitive=False),
-              default=constant.DEFAULT_TORCH_VERSION,
-              help='The runtime torch version, which can be detected by exec command "pip show torch"')
-@debug_option
-def analyze_communication(**kwargs) -> None:
-    _analyze(["communication"], **kwargs)
-
-
 @analyze_cli.command(context_settings=CONTEXT_SETTINGS,
                      name="schedule",
                      short_help='Analyze timeline, operators and graph.')
@@ -146,24 +127,4 @@ def analyze_schedule(**kwargs) -> None:
               help="enter the profiling type, selectable range ascend_pytorch_profiler, mslite ,msprof")
 @debug_option
 def analyze_computation(**kwargs) -> None:
-    _analyze(["computation"], **kwargs)
-
-
-@analyze_cli.command(context_settings=CONTEXT_SETTINGS,
-                     name="dataloader",
-                     short_help='Analyze timeline, operators and graph.')
-@click.option('--profiling_path', '-d', 'profiling_path', type=click.Path(), required=True,
-              help='Directory of profiling data')
-@click.option('--cann_version', '-cv', 'cann_version',
-              type=click.Choice(constant.SUPPORTED_CANN_VERSION, case_sensitive=False),
-              default=constant.DEFAULT_CANN_VERSION,
-              help='The CANN software version, which can be viewed by executing the following command: '
-                   '"cat /usr/local/Ascend/ascend-toolkit/latest/aarch64-linux/ascend_toolkit_install.info"')
-@click.option('--torch_version', '-tv', 'torch_version',
-              type=click.Choice(constant.SUPPORTED_TORCH_VERSION, case_sensitive=False),
-              default=constant.DEFAULT_TORCH_VERSION,
-              help='The runtime torch version, which can be detected by exec command "pip show torch"')
-@click.option('--is_inference', is_flag=True, help="Enable performance analysis of inference task")
-@debug_option
-def analyze_dataloader(**kwargs) -> None:
-    _analyze(["dataloader"], **kwargs)
+    _analyze(["computation"], **kwargs)
\ No newline at end of file
diff --git a/profiler/cli/cluster_cli.py b/profiler/cli/cluster_cli.py
index e69de29bb2..9ce8e45058 100644
--- a/profiler/cli/cluster_cli.py
+++ b/profiler/cli/cluster_cli.py
@@ -0,0 +1,27 @@
+import ast
+import click
+import os
+import sys
+
+sys.path.append(os.path.dirname(os.path.dirname(__file__)))
+
+from profiler.advisor.utils.tools import CONTEXT_SETTINGS, ClickAliasedGroup
+from profiler.advisor.utils.utils import debug_option
+from profiler.cluster_analyse.common_func.constant import Constant
+from profiler.cluster_analyse.cluster_analysis import ClusterAnalysis
+
+
+@click.command(context_settings=CONTEXT_SETTINGS,
+               name="cluster",
+               short_help='The analysis of cluster profiling data')
+@click.option('--profiling_path', '-d', 'base_profiling_path', type=click.Path(), required=True,
+              help='profiling data path')
+@click.option('--mode', '-m', 'mode', type=click.Choice(['all', 'communication_time', 'communication_matrix']),
+              default='all', help="different analysis mode")
+@debug_option
+def cluster_cli(**kwargs) -> None:
+    parameter = {
+        Constant.COLLECTION_PATH: kwargs.get("base_profiling_path"),
+        Constant.ANALYSIS_MODE: kwargs.get("mode")
+    }
+    ClusterAnalysis(parameter).run()
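The new cluster command above simply repackages its two options into the parameter dict that ClusterAnalysis expects. Assuming it is registered on the advisor_cli group as in the entrance.py hunk later in this commit, it can be driven programmatically the same way the removed __main__ block drove the analyze command; the path below is a placeholder:

    # Placeholder path; equivalent to the shell call
    # "msprof-analyze cluster -d ./cluster_data -m communication_time".
    from profiler.cli.entrance import advisor_cli

    advisor_cli.main(["cluster", "-d", "./cluster_data", "-m", "communication_time"])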
diff --git a/profiler/cli/compare_cli.py b/profiler/cli/compare_cli.py
index a4e69653f2..2bce1f82c4 100644
--- a/profiler/cli/compare_cli.py
+++ b/profiler/cli/compare_cli.py
@@ -10,15 +10,8 @@ from profiler.advisor.utils.utils import debug_option
 from profiler.advisor.common.timeline.event import AdvisorDict
 from profiler.compare_tools.compare_backend.comparison_generator import ComparisonGenerator
 
-
-@click.group(name="compare", cls=ClickAliasedGroup)
-def compare_cli(**kwargs):
-    """Query operator details from timeline."""
-    pass
-
-
-@compare_cli.command(context_settings=CONTEXT_SETTINGS,
-                     name="profiling",
+@click.command(context_settings=CONTEXT_SETTINGS,
+               name="compare",
                short_help='Analyze timeline for specific operator and report detail code stacks.')
 @click.option('--profiling_path', '-d', 'base_profiling_path', type=click.Path(), required=True,
               help='path of trace_view.json in profiling')
@@ -33,6 +26,6 @@ def compare_cli(**kwargs):
 @click.option('--use_input_shape', is_flag=True)
 @click.option('--gpu_flow_cat', type=str, default='', help="gpu flow event的分类标识")
 @debug_option
-def compare_profiling(**kwargs) -> None:
+def compare_cli(**kwargs) -> None:
     args = AdvisorDict(kwargs)
     ComparisonGenerator(args).run()
diff --git a/profiler/cli/entrance.py b/profiler/cli/entrance.py
index 031fa76d6a..ef0815ce11 100644
--- a/profiler/cli/entrance.py
+++ b/profiler/cli/entrance.py
@@ -6,7 +6,7 @@ import click
 from profiler.cli.analyze_cli import analyze_cli
 from profiler.cli.complete_cli import auto_complete_cli
 from profiler.cli.compare_cli import compare_cli
-
+from profiler.cli.cluster_cli import cluster_cli
 from profiler.advisor.version import print_version_callback, cli_version
 
 logger = logging.getLogger()
@@ -14,9 +14,9 @@ CONTEXT_SETTINGS = dict(help_option_names=['-H', '-h', '--help'],
                         max_content_width=160)
 
 COMMAND_PRIORITY = {
-    "analyze": 1,
-    "query": 2,
-    "env": 3,
+    "advisor": 1,
+    "compare": 2,
+    "cluster": 3,
     "auto-completion": 4
 }
 
@@ -47,13 +47,7 @@ def advisor_cli(**kwargs):
     pass
 
 
-advisor_cli.add_command(analyze_cli, name="analyze")
-advisor_cli.add_command(auto_complete_cli, name="auto-completion")
+advisor_cli.add_command(analyze_cli, name="advisor")
 advisor_cli.add_command(compare_cli, name="compare")
-
-if __name__ == '__main__':
-    advisor_cli.main(
-        ["analyze", "all", "-d",
-         "C:\xxx\profiling_data"
-        ]
-    )
+advisor_cli.add_command(cluster_cli, name="cluster")
+advisor_cli.add_command(auto_complete_cli, name="auto-completion")
\ No newline at end of file
diff --git a/profiler/cli/update_cli.py b/profiler/cli/update_cli.py
deleted file mode 100644
index 9407981ae0..0000000000
--- a/profiler/cli/update_cli.py
+++ /dev/null
@@ -1,40 +0,0 @@
-from urllib import parse
-
-import click
-
-from profiler.advisor.common import constant
-from profiler.advisor.config.config import Config
-from profiler.advisor.utils.tools import CONTEXT_SETTINGS, ClickAliasedGroup
-from profiler.advisor.utils.utils import debug_option, request_with_retry
-
-
-@click.group(name="update", cls=ClickAliasedGroup)
-def update_cli(**kwargs):
-    """Update operation command, such as update rule and specify save path."""
-    pass
-
-
-@update_cli.command(context_settings=CONTEXT_SETTINGS,
-                    name="rule",
-                    short_help='Update the ma-advisor rules on the terminal. The default save path is '
-                               '"~/rules/cloud/". If user want to specify the save path, please use the environment '
-                               'variable "ADVISOR_RULE_PATH"')
-@click.option('--region', '-r', type=click.Choice(constant.CLOUD_RULE_REGION_LIST), required=True,
-              default=constant.DEFAULT_CLOUD_RULE_REGION,
-              help='Specifies the region where the rule file is downloaded.')
-@debug_option
-def update_rule(**kwargs) -> None:
-    """
-    Download the latest rule yaml file.
-    """
-    region_name = kwargs.get("region")
-    rule_bucket = Config().config.get(constant.RULE_BUCKET, region_name)
-    rule_endpoint_suffix = constant.COMMON_ENDPOINT_SUFFIX.format(region_name)
-    if region_name in constant.INNER_REGION_LIST:
-        rule_endpoint_suffix = constant.INNER_ENDPOINT_SUFFIX.format(region_name)
-
-    obs_url = constant.HTTPS_PREFIXES + rule_bucket + "." + rule_endpoint_suffix
-    obs_url = parse.urljoin(obs_url, constant.COMMON_YAML_DIR)
-    for file_name in constant.CLOUD_YAML_NAME_LIST:
-        url = parse.urljoin(obs_url, file_name)
-        request_with_retry(url, region_name)
-- 
Gitee
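COMMAND_PRIORITY in the entrance.py hunk above now ranks advisor, compare, cluster and auto-completion. A minimal sketch of the click pattern such a dict usually feeds, assuming the group sorts its help listing by priority; the actual group class is defined outside this patch and may differ:

    # Assumed ordering pattern, for illustration only.
    import click

    COMMAND_PRIORITY = {"advisor": 1, "compare": 2, "cluster": 3, "auto-completion": 4}

    class PriorityOrderedGroup(click.Group):
        def list_commands(self, ctx):
            # Show registered commands in priority order instead of alphabetically.
            return sorted(self.commands, key=lambda name: COMMAND_PRIORITY.get(name, 99))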
From ab4c5a4e1b3be7d906b80b7e9e76500ff1370c60 Mon Sep 17 00:00:00 2001
From: wuyuhan
Date: Wed, 22 May 2024 11:11:51 +0800
Subject: [PATCH 19/21] interface bug fix

---
 profiler/advisor/__init__.py               | 1 +
 profiler/advisor/common/analyzer_scopes.py | 2 +-
 profiler/advisor/interface/interface.py    | 8 ++++++--
 profiler/advisor/result/result.py          | 3 ++-
 profiler/cli/complete_cli.py               | 6 +++---
 5 files changed, 13 insertions(+), 7 deletions(-)

diff --git a/profiler/advisor/__init__.py b/profiler/advisor/__init__.py
index e69de29bb2..cea5ae3406 100644
--- a/profiler/advisor/__init__.py
+++ b/profiler/advisor/__init__.py
@@ -0,0 +1 @@
+from profiler.advisor.interface.interface import Interface
\ No newline at end of file
diff --git a/profiler/advisor/common/analyzer_scopes.py b/profiler/advisor/common/analyzer_scopes.py
index 0c6a2ac260..4041aa0483 100644
--- a/profiler/advisor/common/analyzer_scopes.py
+++ b/profiler/advisor/common/analyzer_scopes.py
@@ -6,4 +6,4 @@ class SupportedScopes:
     GRAPH = "graph"
     SLOW_RANK = "slow_rank"
     SLOW_LINK = "slow_link"
-    PORFILING_OPERATOR_ANALYSIS = "profiling_operator_analysis"
+    PROFILING_OPERATOR_ANALYSIS = "profiling_operator_analysis"
diff --git a/profiler/advisor/interface/interface.py b/profiler/advisor/interface/interface.py
index 4ac062ce6c..1ee6e62af5 100644
--- a/profiler/advisor/interface/interface.py
+++ b/profiler/advisor/interface/interface.py
@@ -1,5 +1,8 @@
 import os
 from collections import OrderedDict
+import sys
+sys.path.insert(0, os.path.join(os.path.dirname(os.path.dirname(os.path.dirname(os.path.realpath(__file__)))), "cluster_analyse"))
+sys.path.insert(0, os.path.join(os.path.dirname(os.path.dirname(os.path.dirname(os.path.realpath(__file__)))), "compare_tools"))
 
 from profiler.advisor.utils.utils import Timer
 from profiler.advisor.analyzer.computation.profiling_analyzer import ProfilingAnalyzer
@@ -16,7 +19,7 @@ class Interface:
             SupportedScopes.TIMELINE_FUSION_OPS: TimelineFusionOpsAnalyzer
         }),
         "computation": OrderedDict({
-            SupportedScopes.PORFILING_OPERATOR_ANALYSIS: ProfilingAnalyzer,
+            SupportedScopes.PROFILING_OPERATOR_ANALYSIS: ProfilingAnalyzer,
             SupportedScopes.GRAPH: FusionOPAnalyzer
         }),
         "communication": OrderedDict(),
@@ -59,7 +62,8 @@ class Interface:
         if hasattr(analyzer, "html_render"):
             analyzer.html_render.render_html()
             analyzer.html_render.save_to_file(f'att_advisor_{Timer().strftime}.html')
-        return result if not output_dict else result.data.get(getattr(SupportedScopes, scope.upper()))
+
+        return result if not output_dict else dict(result.data)
 
 
 if __name__ == "__main__":
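The last interface.py hunk above changes what callers receive when they ask for a dict: the whole result.data mapping rather than the single entry for the requested scope. An illustrative call follows; the method name get_result and the constructor argument are assumptions, only the dimension string, scope string and output_dict flag appear in this patch:

    # Assumed call shape, shown only to illustrate the changed return value.
    interface = Interface(profiling_path="./profiling_data")
    data = interface.get_result("computation", "profiling_operator_analysis", output_dict=True)
    # before this fix: only the entry for the requested scope was returned
    # after this fix:  data is dict(result.data) and may hold entries from every scope run so far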
"computation": OrderedDict({ - SupportedScopes.PORFILING_OPERATOR_ANALYSIS: ProfilingAnalyzer, + SupportedScopes.PROFILING_OPERATOR_ANALYSIS: ProfilingAnalyzer, SupportedScopes.GRAPH: FusionOPAnalyzer }), "communication": OrderedDict(), @@ -59,7 +62,8 @@ class Interface: if hasattr(analyzer, "html_render"): analyzer.html_render.render_html() analyzer.html_render.save_to_file(f'att_advisor_{Timer().strftime}.html') - return result if not output_dict else result.data.get(getattr(SupportedScopes, scope.upper())) + + return result if not output_dict else dict(result.data) if __name__ == "__main__": diff --git a/profiler/advisor/result/result.py b/profiler/advisor/result/result.py index 30b8f5795c..06a515e783 100644 --- a/profiler/advisor/result/result.py +++ b/profiler/advisor/result/result.py @@ -90,7 +90,8 @@ class SheetRecoder: if not isinstance(self._sheet_data[sheet_name].get("data"), list): self._sheet_data[sheet_name]["data"] = [] - self._sheet_data[sheet_name]["data"].append(data) + if data not in self._sheet_data[sheet_name]["data"]: + self._sheet_data[sheet_name]["data"].append(data) @singleton diff --git a/profiler/cli/complete_cli.py b/profiler/cli/complete_cli.py index e4fa0caf3f..28f00c5866 100644 --- a/profiler/cli/complete_cli.py +++ b/profiler/cli/complete_cli.py @@ -18,11 +18,11 @@ def auto_complete_cli(shell_type): """ click.echo("Tips: please paste following shell command to your terminal to activate auto completion.\n") if shell_type.lower() == "bash": - bash_str = 'eval "$(_advisor_COMPLETE=bash_source ma-advisor)"' + bash_str = 'eval "$(_advisor_COMPLETE=bash_source msprof-analyze)"' elif shell_type.lower() == "zsh": - bash_str = 'eval "$(_advisor_COMPLETE=zsh_source ma-advisor)"' + bash_str = 'eval "$(_advisor_COMPLETE=zsh_source msprof-analyze)"' elif shell_type.lower() == "fish": - bash_str = 'eval (env _advisor_COMPLETE=fish_source ma-advisor)' + bash_str = 'eval (env _advisor_COMPLETE=fish_source msprof-analyze)' else: click.echo(f'Unsupported shell type {shell_type}.') return -- Gitee From 568f3bb5fae2a23cb0ce4202d40b6e9ef5ab8853 Mon Sep 17 00:00:00 2001 From: PersonalC Date: Wed, 22 May 2024 17:29:26 +0800 Subject: [PATCH 20/21] bugfix: auto-completion --- profiler/cli/complete_cli.py | 8 ++++---- profiler/requirements/build.txt | 3 ++- 2 files changed, 6 insertions(+), 5 deletions(-) diff --git a/profiler/cli/complete_cli.py b/profiler/cli/complete_cli.py index 28f00c5866..ebf2cbf30b 100644 --- a/profiler/cli/complete_cli.py +++ b/profiler/cli/complete_cli.py @@ -14,15 +14,15 @@ def auto_complete_cli(shell_type): \b # print bash auto complete command to terminal - ma-advisor auto-completion Bash + msprof-analyze auto-completion Bash """ click.echo("Tips: please paste following shell command to your terminal to activate auto completion.\n") if shell_type.lower() == "bash": - bash_str = 'eval "$(_advisor_COMPLETE=bash_source msprof-analyze)"' + bash_str = 'eval "$(_MSPROF_ANALYZE_COMPLETE=bash_source msprof-analyze)"' elif shell_type.lower() == "zsh": - bash_str = 'eval "$(_advisor_COMPLETE=zsh_source msprof-analyze)"' + bash_str = 'eval "$(_MSPROF_ANALYZE_COMPLETE=zsh_source msprof-analyze)"' elif shell_type.lower() == "fish": - bash_str = 'eval (env _advisor_COMPLETE=fish_source msprof-analyze)' + bash_str = 'eval (env _MSPROF_ANALYZE_COMPLETE=fish_source msprof-analyze)' else: click.echo(f'Unsupported shell type {shell_type}.') return diff --git a/profiler/requirements/build.txt b/profiler/requirements/build.txt index c750ff83de..d184b170c3 100644 --- 
From 5f096ddc5dfef23a82656a65a13ff5af27a0a116 Mon Sep 17 00:00:00 2001
From: fanxiaotong
Date: Wed, 22 May 2024 18:53:16 +0800
Subject: [PATCH 21/21] bugfix

---
 .../analyzer/cluster/slow_link_analyser.py | 15 +++++--
 .../analyzer/cluster/slow_rank_analyser.py | 13 ++++--
 .../overall/overall_summary_analyzer.py    | 40 ++++++++++++-------
 profiler/advisor/common/analyzer_scopes.py |  1 +
 .../dataset/cluster/cluster_dataset.py     | 14 ++++++-
 profiler/advisor/interface/interface.py    |  2 +-
 6 files changed, 62 insertions(+), 23 deletions(-)

diff --git a/profiler/advisor/analyzer/cluster/slow_link_analyser.py b/profiler/advisor/analyzer/cluster/slow_link_analyser.py
index e9143ae1de..52da3965f6 100644
--- a/profiler/advisor/analyzer/cluster/slow_link_analyser.py
+++ b/profiler/advisor/analyzer/cluster/slow_link_analyser.py
@@ -46,9 +46,15 @@ class SlowLinkAnalyzer(BaseAnalyzer):
         self.result = OptimizeResult()
         self.bottelneck = ''
         self.suggestion = ''
+        self.format_datas = []
 
     def optimize(self, **kwargs):
+        if self.rank_bw_dict is None:
+            print("slow_link analysis failed because the data could not be loaded; please check your cluster_analysis_output folder, \
+                and ignore this message if you do not care about this data.")
+            return self.result
         self.process()
+        self.format_datas = self.format_details()
         self.make_record()
         self.make_render()
         return self.result
@@ -74,12 +80,12 @@ class SlowLinkAnalyzer(BaseAnalyzer):
         headers = ['rank_id'] + list(self.rank_bw_dict[0].keys())
         data_list = []
         for rank_id, rank_bw in self.rank_bw_dict.items():
-            data_list.append([rank_id] + list(rank_bw.keys()))
+            data_list.append([rank_id] + list(rank_bw.values()))
 
         details_dict["headers"] = headers
         details_dict["data"] = data_list
 
-        return [details_dict]
+        return details_dict
 
     def make_record(self):
         """
@@ -92,11 +98,14 @@ class SlowLinkAnalyzer(BaseAnalyzer):
         )
         self.result.add(OptimizeRecord(optimization_item))
 
+        for i, data in enumerate(self.format_datas["data"]):
+            self.result.add_detail(SlowLinkAnalyzer.SLOW_LINK_ANALYSIS, self.format_datas["headers"], data)
+
     def make_render(self):
         result_for_html = {
             "Description" : self.bottelneck,
             "suggestion" : self.suggestion,
-            "details" : self.format_details()
+            "details" : [self.format_datas]
         }
 
         self.html_render.render_template(key="cluster",
diff --git a/profiler/advisor/analyzer/cluster/slow_rank_analyser.py b/profiler/advisor/analyzer/cluster/slow_rank_analyser.py
index b49ef5ec8c..6b1400485e 100644
--- a/profiler/advisor/analyzer/cluster/slow_rank_analyser.py
+++ b/profiler/advisor/analyzer/cluster/slow_rank_analyser.py
@@ -38,10 +38,15 @@ class SlowRankAnalyzer(BaseAnalyzer):
         self.result = OptimizeResult()
         self.bottelneck = ''
         self.suggestion = ''
+        self.format_datas = []
 
     def optimize(self, **kwargs):
+        if self.step_trace_dict is None:
+            print("slow_rank analysis failed because the data could not be loaded; please check your cluster_analysis_output folder, \
+                and ignore this message if you do not care about this data.")
+            return self.result
         self.process()
-
+        self.format_datas = self.format_details()
         self.make_record()
         self.make_render()
         return self.result
@@ -71,6 +76,8 @@ class SlowRankAnalyzer(BaseAnalyzer):
             [""]
         )
         self.result.add(OptimizeRecord(optimization_item))
+        for i, data in enumerate(self.format_datas["data"]):
+            self.result.add_detail(SlowRankAnalyzer.SLOW_RANK_ANALYSIS, self.format_datas["headers"], data)
 
     def format_details(self):
         details_dict = {}
@@ -80,13 +87,13 @@ class SlowRankAnalyzer(BaseAnalyzer):
             data_list.append([key] + value)
 
         details_dict["headers"] = headers
         details_dict["data"] = data_list
-        return [details_dict]
+        return details_dict
 
     def make_render(self):
         result_for_html = {
             "Description" : self.bottelneck,
             "suggestion" : self.suggestion,
-            "details" : self.format_details()
+            "details" : [self.format_datas]
         }
 
         self.html_render.render_template(key="cluster",
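Both cluster analyzers above now cache format_details() as a single mapping and replay its rows through result.add_detail(). A small sketch of the shape this assumes, with invented sample values:

    # Invented values; only the {"headers": [...], "data": [[...], ...]} shape matters.
    format_datas = {
        "headers": ["rank_id", "RDMA bandwidth(GB/s)", "SDMA bandwidth(GB/s)"],
        "data": [
            [0, 12.3, 18.1],
            [1, 11.9, 17.8],
        ],
    }

    for row in format_datas["data"]:
        # mirrors result.add_detail(sheet_name, headers, row) in the hunks above
        print(dict(zip(format_datas["headers"], row)))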
details_dict["headers"] = headers details_dict["data"] = data_list - return [details_dict] + return details_dict def make_render(self): result_for_html = { "Description" : self.bottelneck, "suggestion" : self.suggestion, - "details" : self.format_details() + "details" : [self.format_datas] } self.html_render.render_template(key="cluster", diff --git a/profiler/advisor/analyzer/overall/overall_summary_analyzer.py b/profiler/advisor/analyzer/overall/overall_summary_analyzer.py index f46bb2c1d5..b7c320b7aa 100644 --- a/profiler/advisor/analyzer/overall/overall_summary_analyzer.py +++ b/profiler/advisor/analyzer/overall/overall_summary_analyzer.py @@ -13,6 +13,7 @@ # See the License for the specific language governing permissions and # limitations under the License. import os +import copy import logging from typing import Dict, List @@ -24,14 +25,15 @@ from profiler.advisor.analyzer.base_analyzer import BaseAnalyzer from profiler.compare_tools.compare_backend.utils.constant import Constant from profiler.advisor.common import constant as const from profiler.compare_tools.compare_interface.comparison_interface import ComparisonInterface +from profiler.advisor.utils.utils import get_file_path_from_directory, load_parameter class OverallSummaryAnalyzer(BaseAnalyzer): OVERALL_SUMMARY_ANALYZER = "overall_summary_analysis" advice_map = { - "Computing Time": "if you want more detailed advice please go to compute_perf_analysis.ipynb.", - "Uncovered Communication Time": "if you want more detailed advice please go to cluster_perf_analysis.ipynb.", - "Free Time": "if you want more detailed advice please go to timeline_perf_analysis.ipynb." + "Computing Time": "if you want more detailed advice please go to att_advisor_*.html", + "Uncovered Communication Time": "if you want more detailed advice please go to att_advisor_*.html", + "Free Time": "if you want more detailed advice please go to att_advisor_*.html" } time_name_map = { "Computing Time": "computing", @@ -53,7 +55,8 @@ class OverallSummaryAnalyzer(BaseAnalyzer): def __init__(self, collection_path: str, n_processes: int = 1, cann_version=const.DEFAULT_CANN_VERSION, torch_version=const.DEFAULT_TORCH_VERSION, **kwargs): - super().__init__(collection_path, n_processes, cann_version, torch_version, **kwargs) + profile_path = get_profile_path(collection_path) + super().__init__(profile_path, n_processes, cann_version, torch_version, **kwargs) self.base_collection_path = kwargs.get("base_collection_path", "") self._has_base_collection = False self._is_minimal_profiling = False @@ -204,14 +207,16 @@ class OverallSummaryAnalyzer(BaseAnalyzer): def format_cur_data(self): if not self.cur_data: return - data_table = {} for data_type, data in self.cur_data.items(): - if data: - headers = [key for key in data] - data_list = [data[key] for key in data] - data_table["headers"] = headers - data_table["data"] = [data_list] - self.cur_data_table[data_type] = data_table + if not data: + continue + if data_type not in list(self.time_name_map.values()): + data_list = list(data.values()) + else: + data_list = [','.join(map(str, value)) for value in data.values()] + headers = list(data.keys()) + data_table = {"headers": headers, "data": [data_list]} + self.cur_data_table[data_type] = copy.deepcopy(data_table) def make_record(self): @@ -226,9 +231,9 @@ class OverallSummaryAnalyzer(BaseAnalyzer): self.result.add(OptimizeRecord(optimization_item)) self.result.add_detail(const.BOTTLENECK, self.bottleneck_table["headers"], self.bottleneck_table["data"][0]) - for data_type, data 
diff --git a/profiler/advisor/common/analyzer_scopes.py b/profiler/advisor/common/analyzer_scopes.py
index 4041aa0483..44f09d0a58 100644
--- a/profiler/advisor/common/analyzer_scopes.py
+++ b/profiler/advisor/common/analyzer_scopes.py
@@ -6,4 +6,5 @@ class SupportedScopes:
     GRAPH = "graph"
     SLOW_RANK = "slow_rank"
     SLOW_LINK = "slow_link"
+    OVER_ALL = "over_all"
     PROFILING_OPERATOR_ANALYSIS = "profiling_operator_analysis"
diff --git a/profiler/advisor/dataset/cluster/cluster_dataset.py b/profiler/advisor/dataset/cluster/cluster_dataset.py
index b8daedab08..94527cdf5b 100644
--- a/profiler/advisor/dataset/cluster/cluster_dataset.py
+++ b/profiler/advisor/dataset/cluster/cluster_dataset.py
@@ -71,7 +71,12 @@ class ClusterStepTraceTimeDataSet(ClusterDataset):
 
     def _parse(self):
         self.cluster_analyze()
-        step_data = self.load_csv_data(const.CLUSTER_STEP_TIME_CSV, ClusterStepTraceTimeBean)
+        try:
+            step_data = self.load_csv_data(const.CLUSTER_STEP_TIME_CSV, ClusterStepTraceTimeBean)
+        except RuntimeError as e:
+            print("Exception caught:", e)
+            self._step_dict = None
+            return False
         self._step_dict = self.formate_data(step_data)
         return True
 
@@ -120,7 +125,12 @@ class ClusterCommunicationDataSet(ClusterDataset):
 
     def _parse(self):
         self.cluster_analyze()
-        communication_json = self.load_json_data(const.CLUSTER_COMM_JSON)
+        try:
+            communication_json = self.load_json_data(const.CLUSTER_COMM_JSON)
+        except RuntimeError as e:
+            print("Exception caught:", e)
+            self.rank_bw_dict = None
+            return False
         self.process(communication_json)
         return True
 
diff --git a/profiler/advisor/interface/interface.py b/profiler/advisor/interface/interface.py
index 1ee6e62af5..c0d04db8eb 100644
--- a/profiler/advisor/interface/interface.py
+++ b/profiler/advisor/interface/interface.py
@@ -23,7 +23,7 @@ class Interface:
             SupportedScopes.GRAPH: FusionOPAnalyzer
         }),
         "communication": OrderedDict(),
-        "overall": OrderedDict(),
+        "overall": OrderedDict({SupportedScopes.OVER_ALL: OverallSummaryAnalyzer}),
         "dataloader": OrderedDict(),
         "cluster": OrderedDict({
             SupportedScopes.SLOW_RANK: SlowRankAnalyzer,
-- 
Gitee
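With the overall dimension now backed by OverallSummaryAnalyzer and the cluster datasets failing soft instead of raising, a full run can be exercised the same way the removed __main__ block did; the path below is a placeholder:

    # Placeholder path; equivalent to: msprof-analyze advisor all -d ./profiling_data
    from profiler.cli.entrance import advisor_cli

    advisor_cli.main(["advisor", "all", "-d", "./profiling_data"])
    # each analyzer that owns an html_render contributes to an att_advisor_<timestamp>.html report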